diff --git a/.coveragerc b/.coveragerc
index dd39c8546c..1ba5bb57db 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -17,6 +17,8 @@
# Generated by synthtool. DO NOT EDIT!
[run]
branch = True
+omit =
+ google/cloud/__init__.py
[report]
fail_under = 100
@@ -28,8 +30,11 @@ exclude_lines =
def __repr__
# Ignore abstract methods
raise NotImplementedError
+ # Ignore setuptools-less fallback
+ except pkg_resources.DistributionNotFound:
omit =
*/gapic/*.py
*/proto/*.py
*/core/*.py
- */site-packages/*.py
\ No newline at end of file
+ */site-packages/*.py
+ google/cloud/__init__.py
diff --git a/.flake8 b/.flake8
index 20fe9bda2e..ed9316381c 100644
--- a/.flake8
+++ b/.flake8
@@ -21,6 +21,8 @@ exclude =
# Exclude generated code.
**/proto/**
**/gapic/**
+ **/services/**
+ **/types/**
*_pb2.py
# Standard linting exemptions.
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 39a8fc72bc..f8063630ab 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -8,3 +8,5 @@
# The firestore-dpe team is the default owner for anything not
# explicitly taken by someone else.
* @googleapis/firestore-dpe
+
+/samples/ @googleapis/firestore-dpe @googleapis/python-samples-owners
diff --git a/google/cloud/firestore_admin_v1/gapic/__init__.py b/.github/snippet-bot.yml
similarity index 100%
rename from google/cloud/firestore_admin_v1/gapic/__init__.py
rename to .github/snippet-bot.yml
diff --git a/.gitignore b/.gitignore
index 3fb06e09ce..8e08cebce7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,6 +10,7 @@
dist
build
eggs
+.eggs
parts
bin
var
@@ -28,6 +29,7 @@ pip-log.txt
.nox
.cache
.pytest_cache
+.pytype
# Mac
@@ -45,14 +47,16 @@ pip-log.txt
# Built documentation
docs/_build
bigquery/docs/generated
+docs.metadata
# Virtual environment
env/
coverage.xml
+sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
# Make sure a generated file isn't accidentally committed.
pylintrc
-pylintrc.test
\ No newline at end of file
+pylintrc.test
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 660f5a2044..f26796a0b9 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -23,6 +23,9 @@ export PYTHONUNBUFFERED=1
# Debug: show build environment
env | grep KOKORO
+# Setup firestore account credentials
+export FIRESTORE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/firebase-credentials.json
+
# Setup service account credentials.
export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
@@ -36,4 +39,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation
python3.6 -m pip install --upgrade --quiet nox
python3.6 -m nox --version
-python3.6 -m nox
+# If NOX_SESSION is set, it only runs the specified session,
+# otherwise run all the sessions.
+if [[ -n "${NOX_SESSION:-}" ]]; then
+ python3.6 -m nox -s "${NOX_SESSION:-}"
+else
+ python3.6 -m nox
+fi
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
new file mode 100644
index 0000000000..412b0b56a9
--- /dev/null
+++ b/.kokoro/docker/docs/Dockerfile
@@ -0,0 +1,98 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ubuntu:20.04
+
+ENV DEBIAN_FRONTEND noninteractive
+
+# Ensure local Python is preferred over distribution Python.
+ENV PATH /usr/local/bin:$PATH
+
+# Install dependencies.
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ apt-transport-https \
+ build-essential \
+ ca-certificates \
+ curl \
+ dirmngr \
+ git \
+ gpg-agent \
+ graphviz \
+ libbz2-dev \
+ libdb5.3-dev \
+ libexpat1-dev \
+ libffi-dev \
+ liblzma-dev \
+ libreadline-dev \
+ libsnappy-dev \
+ libssl-dev \
+ libsqlite3-dev \
+ portaudio19-dev \
+ redis-server \
+ software-properties-common \
+ ssh \
+ sudo \
+ tcl \
+ tcl-dev \
+ tk \
+ tk-dev \
+ uuid-dev \
+ wget \
+ zlib1g-dev \
+ && add-apt-repository universe \
+ && apt-get update \
+ && apt-get -y install jq \
+ && apt-get clean autoclean \
+ && apt-get autoremove -y \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /var/cache/apt/archives/*.deb
+
+
+COPY fetch_gpg_keys.sh /tmp
+# Install the desired versions of Python.
+RUN set -ex \
+ && export GNUPGHOME="$(mktemp -d)" \
+ && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
+ && /tmp/fetch_gpg_keys.sh \
+ && for PYTHON_VERSION in 3.7.8 3.8.5; do \
+ wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
+ && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
+ && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
+ && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
+ && mkdir -p /usr/src/python-${PYTHON_VERSION} \
+ && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
+ && rm python-${PYTHON_VERSION}.tar.xz \
+ && cd /usr/src/python-${PYTHON_VERSION} \
+ && ./configure \
+ --enable-shared \
+ # This works only on Python 2.7 and throws a warning on every other
+ # version, but seems otherwise harmless.
+ --enable-unicode=ucs4 \
+ --with-system-ffi \
+ --without-ensurepip \
+ && make -j$(nproc) \
+ && make install \
+ && ldconfig \
+ ; done \
+ && rm -rf "${GNUPGHOME}" \
+ && rm -rf /usr/src/python* \
+ && rm -rf ~/.cache/
+
+RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
+ && python3.7 /tmp/get-pip.py \
+ && python3.8 /tmp/get-pip.py \
+ && rm /tmp/get-pip.py
+
+CMD ["python3.7"]
diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh
new file mode 100755
index 0000000000..d653dd868e
--- /dev/null
+++ b/.kokoro/docker/docs/fetch_gpg_keys.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A script to fetch gpg keys with retry.
+# Avoid jinja parsing the file.
+#
+
+function retry {
+ if [[ "${#}" -le 1 ]]; then
+ echo "Usage: ${0} retry_count commands.."
+ exit 1
+ fi
+ local retries=${1}
+ local command="${@:2}"
+ until [[ "${retries}" -le 0 ]]; do
+ $command && return 0
+ if [[ $? -ne 0 ]]; then
+ echo "command failed, retrying"
+ ((retries--))
+ fi
+ done
+ return 1
+}
+
+# 3.6.9, 3.7.5 (Ned Deily)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+ 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
+
+# 3.8.0 (Łukasz Langa)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+ E3FF2839C048B25C084DEBE9B26995E310250568
+
+#
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index f8f29f5dbe..edd025de31 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -11,12 +11,12 @@ action {
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-firestore/.kokoro/trampoline.sh"
+build_file: "python-firestore/.kokoro/trampoline_v2.sh"
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
}
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
@@ -28,6 +28,23 @@ env_vars: {
value: "docs-staging"
}
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "docs-staging-v2"
+}
+
+# It will upload the docker image after successful builds.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "true"
+}
+
+# It will always build the docker image.
+env_vars: {
+ key: "TRAMPOLINE_DOCKERFILE"
+ value: ".kokoro/docker/docs/Dockerfile"
+}
+
# Fetch the token needed for reporting release status to GitHub
before_action {
fetch_keystore {
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
new file mode 100644
index 0000000000..1118107829
--- /dev/null
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -0,0 +1,17 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+# We only upload the image in the main `docs` build.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "false"
+}
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
new file mode 100755
index 0000000000..f52514257e
--- /dev/null
+++ b/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2020 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+ msg "Retrieving secret ${key}"
+ docker run --entrypoint=gcloud \
+ --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+ gcr.io/google.com/cloudsdktool/cloud-sdk \
+ secrets versions access latest \
+ --project cloud-devrel-kokoro-resources \
+ --secret ${key} > \
+ "${SECRET_LOCATION}/${key}"
+ if [[ $? == 0 ]]; then
+ msg "Secret written to ${SECRET_LOCATION}/${key}"
+ else
+ msg "Error retrieving secret ${key}"
+ fi
+done
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
index 6745696253..8acb14e802 100755
--- a/.kokoro/publish-docs.sh
+++ b/.kokoro/publish-docs.sh
@@ -13,33 +13,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-#!/bin/bash
-
set -eo pipefail
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
-cd github/python-firestore
-
-# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+export PATH="${HOME}/.local/bin:${PATH}"
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --user --upgrade --quiet nox
+python3 -m nox --version
# build docs
nox -s docs
-python3 -m pip install gcp-docuploader
-
-# install a json parser
-sudo apt-get update
-sudo apt-get -y install software-properties-common
-sudo add-apt-repository universe
-sudo apt-get update
-sudo apt-get -y install jq
+python3 -m pip install --user gcp-docuploader
# create metadata
python3 -m docuploader create-metadata \
@@ -54,4 +42,23 @@ python3 -m docuploader create-metadata \
cat docs.metadata
# upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+
+
+# docfx yaml files
+nox -s docfx
+
+# create metadata.
+python3 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index 76cbb79b8a..32388c2581 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -13,8 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-#!/bin/bash
-
set -eo pipefail
# Start the releasetool reporter
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index b7bbee28d4..8905fd5e9d 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,42 +23,18 @@ env_vars: {
value: "github/python-firestore/.kokoro/release.sh"
}
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google_cloud_pypi_password"
- }
- }
-}
-
-# Fetch magictoken to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "releasetool-magictoken"
- }
- }
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google_cloud_pypi_password"
+ }
+ }
}
-# Fetch api key to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "magic-github-proxy-api-key"
- }
- }
-}
+# Tokens needed to report release status back to GitHub
+env_vars: {
+ key: "SECRET_MANAGER_KEYS"
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
new file mode 100644
index 0000000000..89fa672bf7
--- /dev/null
+++ b/.kokoro/samples/lint/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "lint"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-firestore/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-firestore/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/lint/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg
new file mode 100644
index 0000000000..50fec96497
--- /dev/null
+++ b/.kokoro/samples/lint/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/lint/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
new file mode 100644
index 0000000000..b9a59484d3
--- /dev/null
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.6"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py36"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-firestore/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-firestore/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg
new file mode 100644
index 0000000000..7218af1499
--- /dev/null
+++ b/.kokoro/samples/python3.6/continuous.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
new file mode 100644
index 0000000000..50fec96497
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/python3.6/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
new file mode 100644
index 0000000000..ac1589d36b
--- /dev/null
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.7"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py37"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-firestore/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-firestore/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/python3.7/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
new file mode 100644
index 0000000000..50fec96497
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/python3.7/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
new file mode 100644
index 0000000000..82693f383b
--- /dev/null
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.8"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py38"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-firestore/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-firestore/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/python3.8/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
new file mode 100644
index 0000000000..50fec96497
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/python3.8/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
new file mode 100755
index 0000000000..c841366a90
--- /dev/null
+++ b/.kokoro/test-samples.sh
@@ -0,0 +1,110 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-firestore
+
+# Run periodic samples tests at latest release
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ LATEST_RELEASE=$(git describe --abbrev=0 --tags)
+ git checkout $LATEST_RELEASE
+fi
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+ echo "No tests run. `./samples` not found"
+ exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the Build Cop Bot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251f3e..f39236e943 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -15,9 +15,14 @@
set -eo pipefail
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+ chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ echo "cleanup";
+}
+trap cleanup EXIT
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 0000000000..719bcd5ba8
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker container with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from GCS to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+# (true|false): Whether to upload the Docker image after a
+# successful build.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+# Defaults to /workspace.
+# Potentially there are some repo-specific envvars in .trampolinerc in
+# the project root.
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+ readonly IO_COLOR_RED="$(tput setaf 1)"
+ readonly IO_COLOR_GREEN="$(tput setaf 2)"
+ readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+ readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+ readonly IO_COLOR_RED=""
+ readonly IO_COLOR_GREEN=""
+ readonly IO_COLOR_YELLOW=""
+ readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+ [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+ local color="$1"
+ shift
+ local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+ echo "================================================================"
+ echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+ echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+ log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+ log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+ log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+ log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+ rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+ # TRAMPOLINE_V2 variables.
+ # Tells scripts whether they are running as part of CI or not.
+ "RUNNING_IN_CI"
+ # Indicates which CI system we're in.
+ "TRAMPOLINE_CI"
+ # Indicates the version of the script.
+ "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI systems we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container to tell which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+ # descriptive env var for indicating it's on CI.
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="kokoro"
+ if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+ if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+ log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+ exit 1
+ fi
+ # This service account will be activated later.
+ TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+ else
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ gcloud auth list
+ fi
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+ fi
+ pass_down_envvars+=(
+ # KOKORO dynamic variables.
+ "KOKORO_BUILD_NUMBER"
+ "KOKORO_BUILD_ID"
+ "KOKORO_JOB_NAME"
+ "KOKORO_GIT_COMMIT"
+ "KOKORO_GITHUB_COMMIT"
+ "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+ "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+ # For Build Cop Bot
+ "KOKORO_GITHUB_COMMIT_URL"
+ "KOKORO_GITHUB_PULL_REQUEST_URL"
+ )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="travis"
+ pass_down_envvars+=(
+ "TRAVIS_BRANCH"
+ "TRAVIS_BUILD_ID"
+ "TRAVIS_BUILD_NUMBER"
+ "TRAVIS_BUILD_WEB_URL"
+ "TRAVIS_COMMIT"
+ "TRAVIS_COMMIT_MESSAGE"
+ "TRAVIS_COMMIT_RANGE"
+ "TRAVIS_JOB_NAME"
+ "TRAVIS_JOB_NUMBER"
+ "TRAVIS_JOB_WEB_URL"
+ "TRAVIS_PULL_REQUEST"
+ "TRAVIS_PULL_REQUEST_BRANCH"
+ "TRAVIS_PULL_REQUEST_SHA"
+ "TRAVIS_PULL_REQUEST_SLUG"
+ "TRAVIS_REPO_SLUG"
+ "TRAVIS_SECURE_ENV_VARS"
+ "TRAVIS_TAG"
+ )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="github-workflow"
+ pass_down_envvars+=(
+ "GITHUB_WORKFLOW"
+ "GITHUB_RUN_ID"
+ "GITHUB_RUN_NUMBER"
+ "GITHUB_ACTION"
+ "GITHUB_ACTIONS"
+ "GITHUB_ACTOR"
+ "GITHUB_REPOSITORY"
+ "GITHUB_EVENT_NAME"
+ "GITHUB_EVENT_PATH"
+ "GITHUB_SHA"
+ "GITHUB_REF"
+ "GITHUB_HEAD_REF"
+ "GITHUB_BASE_REF"
+ )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="circleci"
+ pass_down_envvars+=(
+ "CIRCLE_BRANCH"
+ "CIRCLE_BUILD_NUM"
+ "CIRCLE_BUILD_URL"
+ "CIRCLE_COMPARE_URL"
+ "CIRCLE_JOB"
+ "CIRCLE_NODE_INDEX"
+ "CIRCLE_NODE_TOTAL"
+ "CIRCLE_PREVIOUS_BUILD_NUM"
+ "CIRCLE_PROJECT_REPONAME"
+ "CIRCLE_PROJECT_USERNAME"
+ "CIRCLE_REPOSITORY_URL"
+ "CIRCLE_SHA1"
+ "CIRCLE_STAGE"
+ "CIRCLE_USERNAME"
+ "CIRCLE_WORKFLOW_ID"
+ "CIRCLE_WORKFLOW_JOB_ID"
+ "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+ "CIRCLE_WORKFLOW_WORKSPACE_ID"
+ )
+fi
+
+# Configure the service account for pulling the docker image.
+function repo_root() {
+ local dir="$1"
+ while [[ ! -d "${dir}/.git" ]]; do
+ dir="$(dirname "$dir")"
+ done
+ echo "${dir}"
+}
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there; otherwise, traverse from `pwd`
+# to find the `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ PROGRAM_PATH="$(realpath "$0")"
+ PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+ PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+ PROJECT_ROOT="$(repo_root $(pwd))"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need
+# to use this environment variable in `PROJECT_ROOT`.
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+ mkdir -p "${tmpdir}/gcloud"
+ gcloud_config_dir="${tmpdir}/gcloud"
+
+ log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+ export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+ log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+ gcloud auth activate-service-account \
+ --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+ # The basic trampoline configurations.
+ "TRAMPOLINE_IMAGE"
+ "TRAMPOLINE_BUILD_FILE"
+)
+
+if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
+ source "${PROJECT_ROOT}/.trampolinerc"
+fi
+
+log_yellow "Checking environment variables."
+for e in "${required_envvars[@]}"
+do
+ if [[ -z "${!e:-}" ]]; then
+ log "Missing ${e} env var. Aborting."
+ exit 1
+ fi
+done
+
+# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1
+# script: e.g. "github/repo-name/.kokoro/run_tests.sh"
+TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
+log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
+
+# ignore error on docker operations and test execution
+set +e
+
+log_yellow "Preparing Docker image."
+# We only download the docker image in CI builds.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ # Download the docker image specified by `TRAMPOLINE_IMAGE`
+
+ # We may want to add --max-concurrent-downloads flag.
+
+ log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ if docker pull "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="true"
+ else
+ log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="false"
+ fi
+else
+ # For local run, check if we have the image.
+ if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
+ has_image="true"
+ else
+ has_image="false"
+ fi
+fi
+
+
+# The default user for a Docker container has uid 0 (root). To avoid
+# creating root-owned files in the build directory we tell docker to
+# use the current user ID.
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+user_name="$(id -un)"
+
+# To allow docker in docker, we add the user to the docker group in
+# the host os.
+docker_gid=$(cut -d: -f3 < <(getent group docker))
+
+update_cache="false"
+if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
+ # Build the Docker image from the source.
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
+ docker_build_flags=(
+ "-f" "${TRAMPOLINE_DOCKERFILE}"
+ "-t" "${TRAMPOLINE_IMAGE}"
+ "--build-arg" "UID=${user_uid}"
+ "--build-arg" "USERNAME=${user_name}"
+ )
+ if [[ "${has_image}" == "true" ]]; then
+ docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
+ fi
+
+ log_yellow "Start building the docker image."
+ if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
+ echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
+ fi
+
+ # ON CI systems, we want to suppress docker build logs, only
+ # output the logs when it fails.
+ if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ if docker build "${docker_build_flags[@]}" "${context_dir}" \
+ > "${tmpdir}/docker_build.log" 2>&1; then
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ cat "${tmpdir}/docker_build.log"
+ fi
+
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ log_yellow "Dumping the build logs:"
+ cat "${tmpdir}/docker_build.log"
+ exit 1
+ fi
+ else
+ if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ exit 1
+ fi
+ fi
+else
+ if [[ "${has_image}" != "true" ]]; then
+ log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+ exit 1
+ fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+ # Remove the container after it exits.
+ "--rm"
+
+ # Use the host network.
+ "--network=host"
+
+ # Run in privileged mode. We are not using docker for sandboxing or
+ # isolation, just for packaging our dev tools.
+ "--privileged"
+
+ # Run the build script with the current user ID. Because the Docker image
+ # writes into ${PWD}, you typically want this to be your user ID.
+ # To allow docker in docker, we need to use docker gid on the host.
+ "--user" "${user_uid}:${docker_gid}"
+
+ # Pass down the USER.
+ "--env" "USER=${user_name}"
+
+ # Mount the project directory inside the Docker container.
+ "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+ "--workdir" "${TRAMPOLINE_WORKSPACE}"
+ "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+ # Mount the temporary home directory.
+ "--volume" "${tmphome}:/h"
+ "--env" "HOME=/h"
+
+ # Allow docker in docker.
+ "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+ # Mount the /tmp so that docker in docker can mount the files
+ # there correctly.
+ "--volume" "/tmp:/tmp"
+ # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+ # TODO(tmatsuo): This part is not portable.
+ "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+ "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+ "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+ "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+ "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+ docker_flags+=("-it")
+fi
+
+# Passing down env vars
+for e in "${pass_down_envvars[@]}"
+do
+ if [[ -n "${!e:-}" ]]; then
+ docker_flags+=("--env" "${e}=${!e}")
+ fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+ log_yellow "Running the given commands '" "${@:1}" "' in the container."
+ readonly commands=("${@:1}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+ log_yellow "Running the tests in a Docker container."
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+ log_green "Build finished with ${test_retval}"
+else
+ log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+ [[ $test_retval == 0 ]] && \
+ [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+ log_yellow "Uploading the Docker image."
+ if docker push "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished uploading the Docker image."
+ else
+ log_red "Failed uploading the Docker image."
+ fi
+ # Call trampoline_after_upload_hook if it's defined.
+ if function_exists trampoline_after_upload_hook; then
+ trampoline_after_upload_hook
+ fi
+
+fi
+
+exit "${test_retval}"
diff --git a/.trampolinerc b/.trampolinerc
new file mode 100644
index 0000000000..995ee29111
--- /dev/null
+++ b/.trampolinerc
@@ -0,0 +1,51 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Template for .trampolinerc
+
+# Add required env vars here.
+required_envvars+=(
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+)
+
+# Add env vars which are passed down into the container here.
+pass_down_envvars+=(
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+)
+
+# Prevent unintentional override on the default image.
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
+ [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
+ exit 1
+fi
+
+# Define the default value if it makes sense.
+if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
+ TRAMPOLINE_IMAGE_UPLOAD=""
+fi
+
+if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ TRAMPOLINE_IMAGE=""
+fi
+
+if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
+ TRAMPOLINE_DOCKERFILE=""
+fi
+
+if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
+ TRAMPOLINE_BUILD_FILE=""
+fi
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d1367fb302..d1b8008c7a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,240 @@
[1]: https://pypi.org/project/google-cloud-firestore/#history
+## [2.0.0](https://www.github.com/googleapis/python-firestore/compare/v1.9.0...v2.0.0) (2020-11-06)
+
+
+### ⚠ BREAKING CHANGES
+
+* remove support for Python 2.7
+* remove v1beta1 surface for v2 (#96)
+* Begin using new microgenerator for v2 firestore (#91)
+* from `firestore-0.30.0`: revert to merge not being an option;
+
+### Features
+
+* add client_options to base client class ([#150](https://www.github.com/googleapis/python-firestore/issues/150)) ([f3bedc1](https://www.github.com/googleapis/python-firestore/commit/f3bedc1efae4430c6853581fafef06d613548314))
+* add inline type hints and pytype ci ([#134](https://www.github.com/googleapis/python-firestore/issues/134)) ([afff842](https://www.github.com/googleapis/python-firestore/commit/afff842a3356cbe5b0342be57341c12b2d601fda))
+* add retry/timeout to manual surface ([#222](https://www.github.com/googleapis/python-firestore/issues/222)) ([db5f286](https://www.github.com/googleapis/python-firestore/commit/db5f286772592460b2bf02df25a121994889585d)), closes [#221](https://www.github.com/googleapis/python-firestore/issues/221)
+* add support for not-in and not-eq query operators ([#202](https://www.github.com/googleapis/python-firestore/issues/202)) ([1d09f21](https://www.github.com/googleapis/python-firestore/commit/1d09f21f6c8cb7f69f0e30a960418f0f6899aa01))
+* add type hints for method params ([#182](https://www.github.com/googleapis/python-firestore/issues/182)) ([9b6c2f3](https://www.github.com/googleapis/python-firestore/commit/9b6c2f33351c65901ea648e4407b2817e5e70957))
+* asyncio microgen batch ([#122](https://www.github.com/googleapis/python-firestore/issues/122)) ([a4e5b00](https://www.github.com/googleapis/python-firestore/commit/a4e5b00a4d59e3416061d5c1ed32a111097e88b3))
+* asyncio microgen client ([#118](https://www.github.com/googleapis/python-firestore/issues/118)) ([de4cc44](https://www.github.com/googleapis/python-firestore/commit/de4cc445e34e4a186ccc17bf143e04b45fb35f0b))
+* asyncio microgen collection ([#119](https://www.github.com/googleapis/python-firestore/issues/119)) ([6281a67](https://www.github.com/googleapis/python-firestore/commit/6281a67e0ead38e7b2e477b7f077da7e0457aa9b))
+* asyncio microgen document ([#121](https://www.github.com/googleapis/python-firestore/issues/121)) ([31faecb](https://www.github.com/googleapis/python-firestore/commit/31faecb2ab2956bad64b0852f1fe54a05d8907f9))
+* asyncio microgen query ([#127](https://www.github.com/googleapis/python-firestore/issues/127)) ([178fa2c](https://www.github.com/googleapis/python-firestore/commit/178fa2c2a51a6bd6ef7a3c41b8307e44b5eab062))
+* asyncio microgen transaction ([#123](https://www.github.com/googleapis/python-firestore/issues/123)) ([35185a8](https://www.github.com/googleapis/python-firestore/commit/35185a849053877c9cc561e75cdb4cd7338cc508))
+* asyncio system tests ([#132](https://www.github.com/googleapis/python-firestore/issues/132)) ([4256a85](https://www.github.com/googleapis/python-firestore/commit/4256a856e6f1531959ffc080dfc8c8b3a7263ea5))
+* Begin using new microgenerator for v2 firestore ([#91](https://www.github.com/googleapis/python-firestore/issues/91)) ([e0add08](https://www.github.com/googleapis/python-firestore/commit/e0add0860ca958d139787cdbb7fceb570fbb80ab))
+* create async interface ([#61](https://www.github.com/googleapis/python-firestore/issues/61)) ([eaba25e](https://www.github.com/googleapis/python-firestore/commit/eaba25e892fa33c20ecc7aeab1528a004cbf99f7))
+* Create CODEOWNERS ([#40](https://www.github.com/googleapis/python-firestore/issues/40)) ([a0cbf40](https://www.github.com/googleapis/python-firestore/commit/a0cbf403fe88f07c83bec81f275ac168be573e93))
+* improve type information ([#176](https://www.github.com/googleapis/python-firestore/issues/176)) ([30bb3fb](https://www.github.com/googleapis/python-firestore/commit/30bb3fb5c36648d3b8acf76349a5726d7a5f135d))
+* integrate limit to last ([#145](https://www.github.com/googleapis/python-firestore/issues/145)) ([55da695](https://www.github.com/googleapis/python-firestore/commit/55da695710d0408fc314ffe5cc6d7a48cb71bc3b)), closes [#57](https://www.github.com/googleapis/python-firestore/issues/57)
+* partition queries ([#210](https://www.github.com/googleapis/python-firestore/issues/210)) ([4f75a75](https://www.github.com/googleapis/python-firestore/commit/4f75a75170be1bbb310b9e4741f4862d694b5bf5))
+* remove v1beta1 surface for v2 ([#96](https://www.github.com/googleapis/python-firestore/issues/96)) ([b4a8eb9](https://www.github.com/googleapis/python-firestore/commit/b4a8eb97a68b4c7d1bc9faf0b113dca4476d9f1f))
+* use 'update_transforms' ([#219](https://www.github.com/googleapis/python-firestore/issues/219)) ([c122e41](https://www.github.com/googleapis/python-firestore/commit/c122e4186808468a2ff82e9cc54b501809519859)), closes [#217](https://www.github.com/googleapis/python-firestore/issues/217)
+* use `DatetimeWithNanoseconds` throughout library ([#116](https://www.github.com/googleapis/python-firestore/issues/116)) ([1801ba2](https://www.github.com/googleapis/python-firestore/commit/1801ba2a0e990c533865fef200bbcc3818b3b486))
+* **firestore:** add `IN`, `ARRAY_CONTAINS_ANY` operators; update docstrings (via synth) ([#9439](https://www.github.com/googleapis/python-firestore/issues/9439)) ([107e526](https://www.github.com/googleapis/python-firestore/commit/107e526cb1d887096e99ce86f7125760b325b2bb))
+* **firestore:** add v1beta1 deprecation annotation ([#34](https://www.github.com/googleapis/python-firestore/issues/34)) ([b9e2ab5](https://www.github.com/googleapis/python-firestore/commit/b9e2ab58a41c7bbab28028cb88f84bd6013816ed))
+* **firestore:** surface new 'IN' and 'ARRAY_CONTAINS_ANY' operators ([#9541](https://www.github.com/googleapis/python-firestore/issues/9541)) ([5e9fe4f](https://www.github.com/googleapis/python-firestore/commit/5e9fe4f9ba21b9c38ebd41eb7ed083b335472e0b))
+
+
+### Bug Fixes
+
+* add import message via synth ([#231](https://www.github.com/googleapis/python-firestore/issues/231)) ([5fb02e9](https://www.github.com/googleapis/python-firestore/commit/5fb02e9b9521938ec1040611cf7086077d07aac2)), closes [#227](https://www.github.com/googleapis/python-firestore/issues/227) [#228](https://www.github.com/googleapis/python-firestore/issues/228) [#229](https://www.github.com/googleapis/python-firestore/issues/229)
+* add mocks to query get tests ([#109](https://www.github.com/googleapis/python-firestore/issues/109)) ([c4c5bfa](https://www.github.com/googleapis/python-firestore/commit/c4c5bfab0e5942706f2b55148e5e4f9fbd2e29f3))
+* async_document docs to match expected usecase ([#129](https://www.github.com/googleapis/python-firestore/issues/129)) ([f26f222](https://www.github.com/googleapis/python-firestore/commit/f26f222a82028568c0974f379454c69a0fc549ca))
+* asyncio microgen client get_all type ([#126](https://www.github.com/googleapis/python-firestore/issues/126)) ([9095368](https://www.github.com/googleapis/python-firestore/commit/9095368eaec4271b87ad792ff9bbd065364109f6))
+* await on to_wrap in AsyncTransactional ([#147](https://www.github.com/googleapis/python-firestore/issues/147)) ([e640e66](https://www.github.com/googleapis/python-firestore/commit/e640e663f525233a8173767f6886537dfd97b121))
+* constructor invalid path tests ([#114](https://www.github.com/googleapis/python-firestore/issues/114)) ([edf7bd1](https://www.github.com/googleapis/python-firestore/commit/edf7bd1879587c05b37910b0a870ba092c6f10ef))
+* coverage to 99p ([8ddfe1d](https://www.github.com/googleapis/python-firestore/commit/8ddfe1df7df501524e4d406d9dd3b396fc2680eb))
+* harden version data gathering against DistributionNotFound ([#212](https://www.github.com/googleapis/python-firestore/issues/212)) ([20b7260](https://www.github.com/googleapis/python-firestore/commit/20b72603eb0ae3164f68822c62378853be59d232))
+* name parameter to indicate snapshot support ([#169](https://www.github.com/googleapis/python-firestore/issues/169)) ([be98897](https://www.github.com/googleapis/python-firestore/commit/be988971cc1bbbc3616a849037dafc8cc0bb5745)), closes [#56](https://www.github.com/googleapis/python-firestore/issues/56)
+* pytype client errors ([#146](https://www.github.com/googleapis/python-firestore/issues/146)) ([eb19712](https://www.github.com/googleapis/python-firestore/commit/eb1971274038a079be664004a29a40d9b151d964))
+* recover watch stream on more error types ([#9995](https://www.github.com/googleapis/python-firestore/issues/9995)) ([af5fd1d](https://www.github.com/googleapis/python-firestore/commit/af5fd1dabd411a67afa729d1954cb1b9edf4d619)), closes [#L817](https://www.github.com/googleapis/python-firestore/issues/L817)
+* remove six dependency ([#110](https://www.github.com/googleapis/python-firestore/issues/110)) ([6e597f2](https://www.github.com/googleapis/python-firestore/commit/6e597f2886ff0cd3a9027c434006af0f0895257b))
+* remove six dependency ([#120](https://www.github.com/googleapis/python-firestore/issues/120)) ([d82687d](https://www.github.com/googleapis/python-firestore/commit/d82687db3c55c478285d580547d263f1724a09b7))
+* remove six dependency ([#98](https://www.github.com/googleapis/python-firestore/issues/98)) ([b264ccb](https://www.github.com/googleapis/python-firestore/commit/b264ccb9e2618fb7b40d5b4375777363fc26a9a9)), closes [#94](https://www.github.com/googleapis/python-firestore/issues/94)
+* remove unnecessary dependency on libcst ([#220](https://www.github.com/googleapis/python-firestore/issues/220)) ([cd358db](https://www.github.com/googleapis/python-firestore/commit/cd358db784c4244271f197156662e38ed21d2f45))
+* Support more Python sequence types when encoding to Protobuf ([#21](https://www.github.com/googleapis/python-firestore/issues/21)) ([b1c5987](https://www.github.com/googleapis/python-firestore/commit/b1c5987c606a14874b412e70f93015e161e278d6))
+* type hint improvements ([#144](https://www.github.com/googleapis/python-firestore/issues/144)) ([d30fff8](https://www.github.com/googleapis/python-firestore/commit/d30fff8e42621d42d169e354948c26ee3e0d16f0))
+* **firestore:** fix get and getall method of transaction ([#16](https://www.github.com/googleapis/python-firestore/issues/16)) ([de3aca0](https://www.github.com/googleapis/python-firestore/commit/de3aca0e78b68f66eb76bc679c6e95b0746ad590))
+* **firestore:** fix lint ([#48](https://www.github.com/googleapis/python-firestore/issues/48)) ([7fa00c4](https://www.github.com/googleapis/python-firestore/commit/7fa00c49dc3fab1d687fff9246f3e5ff0682cac0))
+* respect transform values passed into collection.add ([#7072](https://www.github.com/googleapis/python-firestore/issues/7072)) ([c643d91](https://www.github.com/googleapis/python-firestore/commit/c643d914075c1bfc2549a56ec419aff90af4d8e7)), closes [#6826](https://www.github.com/googleapis/python-firestore/issues/6826)
+* update resume token for restarting BiDi streams ([#10282](https://www.github.com/googleapis/python-firestore/issues/10282)) ([61ec5a2](https://www.github.com/googleapis/python-firestore/commit/61ec5a2326aa101bbccbed229582570844e58bb7))
+* Update team to be in correct org ([#43](https://www.github.com/googleapis/python-firestore/issues/43)) ([bef5a3a](https://www.github.com/googleapis/python-firestore/commit/bef5a3af4613b5f9d753bb6f45275e480e4bb301))
+* **firestore:** simplify 'Collection.add', avoid spurious API call ([#9634](https://www.github.com/googleapis/python-firestore/issues/9634)) ([20f093e](https://www.github.com/googleapis/python-firestore/commit/20f093eb65014d307e402b774f14958a29043742)), closes [#9629](https://www.github.com/googleapis/python-firestore/issues/9629)
+
+
+### Reverts
+
+* Revert "Replace relative class refs with fully-qualifed names. (#8039)" (#8095) ([2441825](https://www.github.com/googleapis/python-firestore/commit/24418259483afab8bb9c1996d7bd5d28ab085773)), closes [#8039](https://www.github.com/googleapis/python-firestore/issues/8039) [#8095](https://www.github.com/googleapis/python-firestore/issues/8095)
+* Revert "Do not use easily-misread glyphs in Firestore auto-IDs." (#4589) ([bbfd2ff](https://www.github.com/googleapis/python-firestore/commit/bbfd2ffa614c11e294753915d967278b9e0284f0)), closes [#4589](https://www.github.com/googleapis/python-firestore/issues/4589) [#4588](https://www.github.com/googleapis/python-firestore/issues/4588) [#4583](https://www.github.com/googleapis/python-firestore/issues/4583) [#4107](https://www.github.com/googleapis/python-firestore/issues/4107)
+
+
+* Refactor conformance tests. (#6291) ([4d29c1f](https://www.github.com/googleapis/python-firestore/commit/4d29c1fa7f4a4f10fdafd7797b1f513aa24b7c3c)), closes [#6291](https://www.github.com/googleapis/python-firestore/issues/6291) [#6290](https://www.github.com/googleapis/python-firestore/issues/6290)
+
+
+### Documentation
+
+* add python 2 sunset banner to documentation ([#9036](https://www.github.com/googleapis/python-firestore/issues/9036)) ([819d154](https://www.github.com/googleapis/python-firestore/commit/819d1541bae21e4054124dd32ff38906d82caca9))
+* add upgrading section to index of documentation ([#248](https://www.github.com/googleapis/python-firestore/issues/248)) ([55d1356](https://www.github.com/googleapis/python-firestore/commit/55d1356081c2d2226d7190dac2abdffbf8a0fb2f))
+* adds UPGRADING.md, note to readme, to help inform users about migration to v2 ([#245](https://www.github.com/googleapis/python-firestore/issues/245)) ([6a8cbdd](https://www.github.com/googleapis/python-firestore/commit/6a8cbddd01771190c04a5fc065863e8def3eb44f))
+* document admin client ([#174](https://www.github.com/googleapis/python-firestore/issues/174)) ([f099736](https://www.github.com/googleapis/python-firestore/commit/f09973638e627f741ea7d1f38294c4f8e9677e53)), closes [#30](https://www.github.com/googleapis/python-firestore/issues/30)
+* fix intersphinx reference to requests ([#9294](https://www.github.com/googleapis/python-firestore/issues/9294)) ([e859f3c](https://www.github.com/googleapis/python-firestore/commit/e859f3cb40dae6d9828e01ef28fa2539b978c56f))
+* fix typo in watch documentation ([#115](https://www.github.com/googleapis/python-firestore/issues/115)) ([367ac73](https://www.github.com/googleapis/python-firestore/commit/367ac732048e1e96cacb54238f88603ed47e2833))
+* normalize use of support level badges ([#6159](https://www.github.com/googleapis/python-firestore/issues/6159)) ([6c9f1ac](https://www.github.com/googleapis/python-firestore/commit/6c9f1acd1394d86e5a632a6e2fe1452b5c5b6b87))
+* re-add changelog entries lost in V2 switch ([#178](https://www.github.com/googleapis/python-firestore/issues/178)) ([d4a0f81](https://www.github.com/googleapis/python-firestore/commit/d4a0f8182930e5c74b08ca185c4d94f809b05797)), closes [#177](https://www.github.com/googleapis/python-firestore/issues/177)
+* **firestore:** add documentation for Document,Collection .on_snapshot ([#9275](https://www.github.com/googleapis/python-firestore/issues/9275)) ([f250443](https://www.github.com/googleapis/python-firestore/commit/f250443aa292f0aad757d8fd813467159a333bbf))
+* **firestore:** add new where operators to docstring ([#9789](https://www.github.com/googleapis/python-firestore/issues/9789)) ([c3864f7](https://www.github.com/googleapis/python-firestore/commit/c3864f743f6fdfbfd2a266712c1764ba23749f8f))
+* **firestore:** clarify client threadsafety ([#9254](https://www.github.com/googleapis/python-firestore/issues/9254)) ([4963eee](https://www.github.com/googleapis/python-firestore/commit/4963eee999aa617163db089b6200bb875e5c03fb))
+* **firestore:** remove duplicated word in README ([#9297](https://www.github.com/googleapis/python-firestore/issues/9297)) ([250024c](https://www.github.com/googleapis/python-firestore/commit/250024c4e4fdc0186f52a0e224e6f4b3b7e5694e))
+* **firestore:** standardize use of 'required' and 'optional' in docstrings; add py2 deprecation warning; add 3.8 unit tests (via synth) ([#10068](https://www.github.com/googleapis/python-firestore/issues/10068)) ([0f72f2c](https://www.github.com/googleapis/python-firestore/commit/0f72f2c25bc6023155be49667cb917a1c217ecd3))
+* Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://www.github.com/googleapis/python-firestore/issues/9085)) ([c7b3de8](https://www.github.com/googleapis/python-firestore/commit/c7b3de85ecd5b91b68d4df7a260e25b450e10664))
+* Replace links to '/stable/' with '/latest/'. ([#5901](https://www.github.com/googleapis/python-firestore/issues/5901)) ([e2f606e](https://www.github.com/googleapis/python-firestore/commit/e2f606e472d29725247eeb329bd20524f2a68419)), closes [#5894](https://www.github.com/googleapis/python-firestore/issues/5894)
+
+## [2.0.0-dev2](https://www.github.com/googleapis/python-firestore/compare/v1.9.0...v2.0.0-dev2) (2020-10-26)
+
+
+### ⚠ BREAKING CHANGES
+
+* remove v1beta1 surface for v2 (#96)
+* Begin using new microgenerator for v2 firestore (#91)
+* from `firestore-0.30.0`: revert to merge not being an option;
+
+### Features
+
+* add `retry`/`timeout` to manual surface ([#222](https://www.github.com/googleapis/python-firestore/issues/222)) ([db5f286](https://www.github.com/googleapis/python-firestore/commit/db5f286772592460b2bf02df25a121994889585d)), closes [#221](https://www.github.com/googleapis/python-firestore/issues/221)
+* add support for `not-in` and `not-eq` query operators ([#202](https://www.github.com/googleapis/python-firestore/issues/202)) ([1d09f21](https://www.github.com/googleapis/python-firestore/commit/1d09f21f6c8cb7f69f0e30a960418f0f6899aa01))
+* add type hints for method params ([#182](https://www.github.com/googleapis/python-firestore/issues/182)) ([9b6c2f3](https://www.github.com/googleapis/python-firestore/commit/9b6c2f33351c65901ea648e4407b2817e5e70957))
+* improve type information ([#176](https://www.github.com/googleapis/python-firestore/issues/176)) ([30bb3fb](https://www.github.com/googleapis/python-firestore/commit/30bb3fb5c36648d3b8acf76349a5726d7a5f135d))
+* add support for partition queries ([#210](https://www.github.com/googleapis/python-firestore/issues/210)) ([4f75a75](https://www.github.com/googleapis/python-firestore/commit/4f75a75170be1bbb310b9e4741f4862d694b5bf5))
+* use `update_transforms` for mutations ([#219](https://www.github.com/googleapis/python-firestore/issues/219)) ([c122e41](https://www.github.com/googleapis/python-firestore/commit/c122e4186808468a2ff82e9cc54b501809519859)), closes [#217](https://www.github.com/googleapis/python-firestore/issues/217)
+
+
+### Bug Fixes
+
+* add import message via synth ([#231](https://www.github.com/googleapis/python-firestore/issues/231)) ([5fb02e9](https://www.github.com/googleapis/python-firestore/commit/5fb02e9b9521938ec1040611cf7086077d07aac2)), closes [#227](https://www.github.com/googleapis/python-firestore/issues/227) [#228](https://www.github.com/googleapis/python-firestore/issues/228) [#229](https://www.github.com/googleapis/python-firestore/issues/229)
+* harden version data gathering against DistributionNotFound ([#212](https://www.github.com/googleapis/python-firestore/issues/212)) ([20b7260](https://www.github.com/googleapis/python-firestore/commit/20b72603eb0ae3164f68822c62378853be59d232))
+* name parameter to indicate snapshot support ([#169](https://www.github.com/googleapis/python-firestore/issues/169)) ([be98897](https://www.github.com/googleapis/python-firestore/commit/be988971cc1bbbc3616a849037dafc8cc0bb5745)), closes [#56](https://www.github.com/googleapis/python-firestore/issues/56)
+* remove unnecessary dependency on libcst ([#220](https://www.github.com/googleapis/python-firestore/issues/220)) ([cd358db](https://www.github.com/googleapis/python-firestore/commit/cd358db784c4244271f197156662e38ed21d2f45))
+
+
+### Documentation
+
+* document admin client ([#174](https://www.github.com/googleapis/python-firestore/issues/174)) ([f099736](https://www.github.com/googleapis/python-firestore/commit/f09973638e627f741ea7d1f38294c4f8e9677e53)), closes [#30](https://www.github.com/googleapis/python-firestore/issues/30)
+* re-add changelog entries lost in V2 switch ([#178](https://www.github.com/googleapis/python-firestore/issues/178)) ([d4a0f81](https://www.github.com/googleapis/python-firestore/commit/d4a0f8182930e5c74b08ca185c4d94f809b05797)), closes [#177](https://www.github.com/googleapis/python-firestore/issues/177)
+
+## [2.0.0-dev1](https://www.github.com/googleapis/python-firestore/compare/v1.9.0...v2.0.0-dev1) (2020-08-20)
+
+
+### ⚠ BREAKING CHANGES
+
+* remove v1beta1 surface for v2 (#96)
+* Begin using new microgenerator for v2 firestore (#91)
+* from `firestore-0.30.0`: revert to merge not being an option;
+
+### Features
+
+* asyncio microgen collection ([#119](https://www.github.com/googleapis/python-firestore/issues/119)) ([6281a67](https://www.github.com/googleapis/python-firestore/commit/6281a67e0ead38e7b2e477b7f077da7e0457aa9b))
+* **firestore:** add `IN`, `ARRAY_CONTAINS_ANY` operators; update docstrings (via synth) ([#9439](https://www.github.com/googleapis/python-firestore/issues/9439)) ([107e526](https://www.github.com/googleapis/python-firestore/commit/107e526cb1d887096e99ce86f7125760b325b2bb))
+* add client_options to base client class ([#150](https://www.github.com/googleapis/python-firestore/issues/150)) ([f3bedc1](https://www.github.com/googleapis/python-firestore/commit/f3bedc1efae4430c6853581fafef06d613548314))
+* add inline type hints and pytype ci ([#134](https://www.github.com/googleapis/python-firestore/issues/134)) ([afff842](https://www.github.com/googleapis/python-firestore/commit/afff842a3356cbe5b0342be57341c12b2d601fda))
+* asyncio microgen batch ([#122](https://www.github.com/googleapis/python-firestore/issues/122)) ([a4e5b00](https://www.github.com/googleapis/python-firestore/commit/a4e5b00a4d59e3416061d5c1ed32a111097e88b3))
+* asyncio microgen client ([#118](https://www.github.com/googleapis/python-firestore/issues/118)) ([de4cc44](https://www.github.com/googleapis/python-firestore/commit/de4cc445e34e4a186ccc17bf143e04b45fb35f0b))
+* asyncio microgen document ([#121](https://www.github.com/googleapis/python-firestore/issues/121)) ([31faecb](https://www.github.com/googleapis/python-firestore/commit/31faecb2ab2956bad64b0852f1fe54a05d8907f9))
+* asyncio microgen query ([#127](https://www.github.com/googleapis/python-firestore/issues/127)) ([178fa2c](https://www.github.com/googleapis/python-firestore/commit/178fa2c2a51a6bd6ef7a3c41b8307e44b5eab062))
+* asyncio microgen transaction ([#123](https://www.github.com/googleapis/python-firestore/issues/123)) ([35185a8](https://www.github.com/googleapis/python-firestore/commit/35185a849053877c9cc561e75cdb4cd7338cc508))
+* **firestore:** add v1beta1 deprecation annotation ([#34](https://www.github.com/googleapis/python-firestore/issues/34)) ([b9e2ab5](https://www.github.com/googleapis/python-firestore/commit/b9e2ab58a41c7bbab28028cb88f84bd6013816ed))
+* **firestore:** surface new 'IN' and 'ARRAY_CONTAINS_ANY' operators ([#9541](https://www.github.com/googleapis/python-firestore/issues/9541)) ([5e9fe4f](https://www.github.com/googleapis/python-firestore/commit/5e9fe4f9ba21b9c38ebd41eb7ed083b335472e0b))
+* asyncio system tests ([#132](https://www.github.com/googleapis/python-firestore/issues/132)) ([4256a85](https://www.github.com/googleapis/python-firestore/commit/4256a856e6f1531959ffc080dfc8c8b3a7263ea5))
+* Begin using new microgenerator for v2 firestore ([#91](https://www.github.com/googleapis/python-firestore/issues/91)) ([e0add08](https://www.github.com/googleapis/python-firestore/commit/e0add0860ca958d139787cdbb7fceb570fbb80ab))
+* create async interface ([#61](https://www.github.com/googleapis/python-firestore/issues/61)) ([eaba25e](https://www.github.com/googleapis/python-firestore/commit/eaba25e892fa33c20ecc7aeab1528a004cbf99f7))
+* Create CODEOWNERS ([#40](https://www.github.com/googleapis/python-firestore/issues/40)) ([a0cbf40](https://www.github.com/googleapis/python-firestore/commit/a0cbf403fe88f07c83bec81f275ac168be573e93))
+* integrate limit to last ([#145](https://www.github.com/googleapis/python-firestore/issues/145)) ([55da695](https://www.github.com/googleapis/python-firestore/commit/55da695710d0408fc314ffe5cc6d7a48cb71bc3b)), closes [#57](https://www.github.com/googleapis/python-firestore/issues/57)
+* remove v1beta1 surface for v2 ([#96](https://www.github.com/googleapis/python-firestore/issues/96)) ([b4a8eb9](https://www.github.com/googleapis/python-firestore/commit/b4a8eb97a68b4c7d1bc9faf0b113dca4476d9f1f))
+* use `DatetimeWithNanoseconds` throughout library ([#116](https://www.github.com/googleapis/python-firestore/issues/116)) ([1801ba2](https://www.github.com/googleapis/python-firestore/commit/1801ba2a0e990c533865fef200bbcc3818b3b486))
+
+
+### Bug Fixes
+
+* add mocks to query get tests ([#109](https://www.github.com/googleapis/python-firestore/issues/109)) ([c4c5bfa](https://www.github.com/googleapis/python-firestore/commit/c4c5bfab0e5942706f2b55148e5e4f9fbd2e29f3))
+* async_document docs to match expected usecase ([#129](https://www.github.com/googleapis/python-firestore/issues/129)) ([f26f222](https://www.github.com/googleapis/python-firestore/commit/f26f222a82028568c0974f379454c69a0fc549ca))
+* asyncio microgen client get_all type ([#126](https://www.github.com/googleapis/python-firestore/issues/126)) ([9095368](https://www.github.com/googleapis/python-firestore/commit/9095368eaec4271b87ad792ff9bbd065364109f6))
+* await on to_wrap in AsyncTransactional ([#147](https://www.github.com/googleapis/python-firestore/issues/147)) ([e640e66](https://www.github.com/googleapis/python-firestore/commit/e640e663f525233a8173767f6886537dfd97b121))
+* constructor invalid path tests ([#114](https://www.github.com/googleapis/python-firestore/issues/114)) ([edf7bd1](https://www.github.com/googleapis/python-firestore/commit/edf7bd1879587c05b37910b0a870ba092c6f10ef))
+* coverage to 99p ([8ddfe1d](https://www.github.com/googleapis/python-firestore/commit/8ddfe1df7df501524e4d406d9dd3b396fc2680eb))
+* pytype client errors ([#146](https://www.github.com/googleapis/python-firestore/issues/146)) ([eb19712](https://www.github.com/googleapis/python-firestore/commit/eb1971274038a079be664004a29a40d9b151d964))
+* recover watch stream on more error types ([#9995](https://www.github.com/googleapis/python-firestore/issues/9995)) ([af5fd1d](https://www.github.com/googleapis/python-firestore/commit/af5fd1dabd411a67afa729d1954cb1b9edf4d619)), closes [#L817](https://www.github.com/googleapis/python-firestore/issues/L817)
+* remove six dependency ([#110](https://www.github.com/googleapis/python-firestore/issues/110)) ([6e597f2](https://www.github.com/googleapis/python-firestore/commit/6e597f2886ff0cd3a9027c434006af0f0895257b))
+* remove six dependency ([#120](https://www.github.com/googleapis/python-firestore/issues/120)) ([d82687d](https://www.github.com/googleapis/python-firestore/commit/d82687db3c55c478285d580547d263f1724a09b7))
+* remove six dependency ([#98](https://www.github.com/googleapis/python-firestore/issues/98)) ([b264ccb](https://www.github.com/googleapis/python-firestore/commit/b264ccb9e2618fb7b40d5b4375777363fc26a9a9)), closes [#94](https://www.github.com/googleapis/python-firestore/issues/94)
+* respect transform values passed into collection.add ([#7072](https://www.github.com/googleapis/python-firestore/issues/7072)) ([c643d91](https://www.github.com/googleapis/python-firestore/commit/c643d914075c1bfc2549a56ec419aff90af4d8e7)), closes [#6826](https://www.github.com/googleapis/python-firestore/issues/6826)
+* Support more Python sequence types when encoding to Protobuf ([#21](https://www.github.com/googleapis/python-firestore/issues/21)) ([b1c5987](https://www.github.com/googleapis/python-firestore/commit/b1c5987c606a14874b412e70f93015e161e278d6))
+* type hint improvements ([#144](https://www.github.com/googleapis/python-firestore/issues/144)) ([d30fff8](https://www.github.com/googleapis/python-firestore/commit/d30fff8e42621d42d169e354948c26ee3e0d16f0))
+* update resume token for restarting BiDi streams ([#10282](https://www.github.com/googleapis/python-firestore/issues/10282)) ([61ec5a2](https://www.github.com/googleapis/python-firestore/commit/61ec5a2326aa101bbccbed229582570844e58bb7))
+* **firestore:** fix get and getall method of transaction ([#16](https://www.github.com/googleapis/python-firestore/issues/16)) ([de3aca0](https://www.github.com/googleapis/python-firestore/commit/de3aca0e78b68f66eb76bc679c6e95b0746ad590))
+* **firestore:** fix lint ([#48](https://www.github.com/googleapis/python-firestore/issues/48)) ([7fa00c4](https://www.github.com/googleapis/python-firestore/commit/7fa00c49dc3fab1d687fff9246f3e5ff0682cac0))
+* **firestore:** simplify 'Collection.add', avoid spurious API call ([#9634](https://www.github.com/googleapis/python-firestore/issues/9634)) ([20f093e](https://www.github.com/googleapis/python-firestore/commit/20f093eb65014d307e402b774f14958a29043742)), closes [#9629](https://www.github.com/googleapis/python-firestore/issues/9629)
+* Update team to be in correct org ([#43](https://www.github.com/googleapis/python-firestore/issues/43)) ([bef5a3a](https://www.github.com/googleapis/python-firestore/commit/bef5a3af4613b5f9d753bb6f45275e480e4bb301))
+
+
+### Documentation
+
+* add python 2 sunset banner to documentation ([#9036](https://www.github.com/googleapis/python-firestore/issues/9036)) ([819d154](https://www.github.com/googleapis/python-firestore/commit/819d1541bae21e4054124dd32ff38906d82caca9))
+* fix intersphinx reference to requests ([#9294](https://www.github.com/googleapis/python-firestore/issues/9294)) ([e859f3c](https://www.github.com/googleapis/python-firestore/commit/e859f3cb40dae6d9828e01ef28fa2539b978c56f))
+* **firestore:** clarify client threadsafety ([#9254](https://www.github.com/googleapis/python-firestore/issues/9254)) ([4963eee](https://www.github.com/googleapis/python-firestore/commit/4963eee999aa617163db089b6200bb875e5c03fb))
+* fix typo in watch documentation ([#115](https://www.github.com/googleapis/python-firestore/issues/115)) ([367ac73](https://www.github.com/googleapis/python-firestore/commit/367ac732048e1e96cacb54238f88603ed47e2833))
+* normalize use of support level badges ([#6159](https://www.github.com/googleapis/python-firestore/issues/6159)) ([6c9f1ac](https://www.github.com/googleapis/python-firestore/commit/6c9f1acd1394d86e5a632a6e2fe1452b5c5b6b87))
+* Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://www.github.com/googleapis/python-firestore/issues/9085)) ([c7b3de8](https://www.github.com/googleapis/python-firestore/commit/c7b3de85ecd5b91b68d4df7a260e25b450e10664))
+* Replace links to '/stable/' with '/latest/'. ([#5901](https://www.github.com/googleapis/python-firestore/issues/5901)) ([e2f606e](https://www.github.com/googleapis/python-firestore/commit/e2f606e472d29725247eeb329bd20524f2a68419)), closes [#5894](https://www.github.com/googleapis/python-firestore/issues/5894)
+* **firestore:** add documentation for Document,Collection .on_snapshot ([#9275](https://www.github.com/googleapis/python-firestore/issues/9275)) ([f250443](https://www.github.com/googleapis/python-firestore/commit/f250443aa292f0aad757d8fd813467159a333bbf))
+* **firestore:** add new where operators to docstring ([#9789](https://www.github.com/googleapis/python-firestore/issues/9789)) ([c3864f7](https://www.github.com/googleapis/python-firestore/commit/c3864f743f6fdfbfd2a266712c1764ba23749f8f))
+* **firestore:** remove duplicated word in README ([#9297](https://www.github.com/googleapis/python-firestore/issues/9297)) ([250024c](https://www.github.com/googleapis/python-firestore/commit/250024c4e4fdc0186f52a0e224e6f4b3b7e5694e))
+* **firestore:** standardize use of 'required' and 'optional' in docstrings; add py2 deprecation warning; add 3.8 unit tests (via synth) ([#10068](https://www.github.com/googleapis/python-firestore/issues/10068)) ([0f72f2c](https://www.github.com/googleapis/python-firestore/commit/0f72f2c25bc6023155be49667cb917a1c217ecd3))
+
+### Tests
+
+* Refactor conformance tests. (#6291) ([4d29c1f](https://www.github.com/googleapis/python-firestore/commit/4d29c1fa7f4a4f10fdafd7797b1f513aa24b7c3c)), closes [#6291](https://www.github.com/googleapis/python-firestore/issues/6291) [#6290](https://www.github.com/googleapis/python-firestore/issues/6290)
+
+
+## [1.9.0](https://www.github.com/googleapis/python-firestore/compare/v1.8.1...v1.9.0) (2020-08-13)
+
+
+### Features
+
+* **firestore:** add client_options to base class ([#148](https://www.github.com/googleapis/python-firestore/issues/148)) ([91d6580](https://www.github.com/googleapis/python-firestore/commit/91d6580e2903ab55798d66bc53541faa86ca76fe))
+
+
+### [1.8.1](https://www.github.com/googleapis/python-firestore/compare/v1.8.0...v1.8.1) (2020-07-07)
+
+
+### Bug Fixes
+
+* **#82:** Add import back to generated client ([#83](https://www.github.com/googleapis/python-firestore/issues/83)) ([2d0ee60](https://www.github.com/googleapis/python-firestore/commit/2d0ee603926ffad484c9874e8745ea97d3c384eb)), closes [#82](https://www.github.com/googleapis/python-firestore/issues/82)
+
+
+## [1.8.0](https://www.github.com/googleapis/python-firestore/compare/v1.7.0...v1.8.0) (2020-07-06)
+
+
+### Features
+
+* support limit to last feature ([#57](https://www.github.com/googleapis/python-firestore/issues/57)) ([8c75e21](https://www.github.com/googleapis/python-firestore/commit/8c75e218331fda25ea3a789e84ba8dc11af2db02))
+* **firestore:** add support of emulator to run system tests on emulator ([#31](https://www.github.com/googleapis/python-firestore/issues/31)) ([891edc7](https://www.github.com/googleapis/python-firestore/commit/891edc7a9fd576cf0b61286502b0ba02223f89c6))
+* **firestore:** add v1beta1 deprecation annotation ([#34](https://www.github.com/googleapis/python-firestore/issues/34)) ([b9e2ab5](https://www.github.com/googleapis/python-firestore/commit/b9e2ab58a41c7bbab28028cb88f84bd6013816ed))
+* **v1:** add batch write ([#62](https://www.github.com/googleapis/python-firestore/issues/62)) ([1415bc4](https://www.github.com/googleapis/python-firestore/commit/1415bc47a7b9742c4a522ab2be67bbcb5ce39db4))
+
+
+### Bug Fixes
+
+* Support more Python sequence types when encoding to Protobuf ([#21](https://www.github.com/googleapis/python-firestore/issues/21)) ([b1c5987](https://www.github.com/googleapis/python-firestore/commit/b1c5987c606a14874b412e70f93015e161e278d6))
+* **firestore:** use specific naming convention ([#58](https://www.github.com/googleapis/python-firestore/issues/58)) ([c97a168](https://www.github.com/googleapis/python-firestore/commit/c97a168d9b1e4f2cd8625b02f66d6978381652dd))
+
+
+### Documentation
+
+* **firestore:** on_snapshot document changes ([#79](https://www.github.com/googleapis/python-firestore/issues/79)) ([c556fc5](https://www.github.com/googleapis/python-firestore/commit/c556fc5c656ed313c2b1d3eb37435c694601ee11))
+
+
## [1.7.0](https://www.github.com/googleapis/python-firestore/compare/v1.6.2...v1.7.0) (2020-05-18)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index bd01896aa1..577a55d876 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests.
.. nox: https://pypi.org/project/nox/
-Note on Editable Installs / Develop Mode
-========================================
-
-- As mentioned previously, using ``setuptools`` in `develop mode`_
- or a ``pip`` `editable install`_ is not possible with this
- library. This is because this library uses `namespace packages`_.
- For context see `Issue #2316`_ and the relevant `PyPA issue`_.
-
- Since ``editable`` / ``develop`` mode can't be used, packages
- need to be installed directly. Hence your changes to the source
- tree don't get incorporated into the **already installed**
- package.
-
-.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
-.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
-.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
-.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
-.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
-
*****************************************
I'm getting weird errors... Can you help?
*****************************************
diff --git a/MANIFEST.in b/MANIFEST.in
index 68855abc3f..e9e29d1203 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -20,3 +20,6 @@ recursive-include google *.json *.proto
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
\ No newline at end of file
diff --git a/Makefile_v1 b/Makefile_v1
index af193e3e81..1648687e27 100644
--- a/Makefile_v1
+++ b/Makefile_v1
@@ -11,30 +11,51 @@ GOOGLEAPIS_REPO = $(REPO_DIR)/googleapis
TESTS_REPO = $(REPO_DIR)/conformance-tests
TEST_PROTO_DIR = $(TESTS_REPO)/firestore/v1
TEST_PROTO_SRC = $(TEST_PROTO_DIR)/proto/google/cloud/conformance/firestore/v1/tests.proto
+TESTDATA_DIR = `pwd`/tests/unit/v1/testdata/
TMPDIR = /tmp/python-fs-proto
-TMPDIR_FS = $(TMPDIR)/google/cloud/firestore_v1/proto
+TMPDIR_FS = $(TMPDIR)/google/cloud/firestore_v1/types
TEST_PROTO_COPY = $(TMPDIR_FS)/tests.proto
+TEST_GEN_OUT = tests/unit/v1/conformance_tests.py
+OUTDIR = /tmp/python-fs-gen
-.PHONY: sync-protos gen-protos
+.PHONY: sync-protos gen-protos docker-pull
-gen-protos: sync-protos tweak-protos
- # TODO(jba): Put the generated proto somewhere more suitable.
- $(PROTOC) --python_out=. \
- -I $(TMPDIR) \
- -I $(PROTOBUF_REPO)/src \
- -I $(GOOGLEAPIS_REPO) \
- $(TEST_PROTO_COPY)
+gen-protos: sync-protos tweak-protos docker-pull gen-protos-raw
+
+gen-protos-raw:
+ mkdir -p $(OUTDIR)
+ docker run \
+ --mount type=bind,source=$(TMPDIR),destination="/in",readonly \
+ --mount type=bind,source=$(OUTDIR),destination="/out" \
+ --rm \
+ --user `id -u`:`id -g` \
+ gcr.io/gapic-images/gapic-generator-python
+ cp $(OUTDIR)/google/cloud/firestore_v1/types/tests.py \
+ $(TEST_GEN_OUT)
+ sed -i -e \
+ "s@package='google.cloud.firestore_v1'@package='tests.unit.v1'@" \
+ $(TEST_GEN_OUT)
tweak-protos:
mkdir -p $(TMPDIR_FS)
cp $(GOOGLEAPIS_REPO)/google/firestore/v1/*.proto $(TMPDIR_FS)
- sed -i -e 's@google/firestore/v1@google/cloud/firestore_v1/proto@' $(TMPDIR_FS)/*.proto
+ sed -i -e 's@google/firestore/v1@google/cloud/firestore_v1/types@' $(TMPDIR_FS)/*.proto
+ sed -i -e 's@package google\.firestore\.v1@package google.cloud.firestore_v1@' $(TMPDIR_FS)/*.proto
cp $(TEST_PROTO_SRC) $(TEST_PROTO_COPY)
- sed -i -e 's@package google.cloud.conformance.firestore.v1@package google.cloud.firestore_v1.proto@' $(TEST_PROTO_COPY)
- sed -i -e 's@google/firestore/v1@google/cloud/firestore_v1/proto@' $(TEST_PROTO_COPY)
+ sed -i -e 's@package google\.cloud\.conformance\.firestore\.v1@package google.cloud.firestore_v1@' $(TEST_PROTO_COPY)
+ sed -i -e 's@google/firestore/v1@google/cloud/firestore_v1/types@' $(TEST_PROTO_COPY)
+ sed -i -e 's@google\.firestore\.v1@google.cloud.firestore_v1@' $(TEST_PROTO_COPY)
+ sed -i -e 's@Cursor@Cursor_@' $(TEST_PROTO_COPY)
sync-protos:
cd $(PROTOBUF_REPO); git pull
cd $(GOOGLEAPIS_REPO); git pull
cd $(TESTS_REPO); git pull
+
+docker-pull:
+ docker pull gcr.io/gapic-images/gapic-generator-python:latest
+
+copy-testdata:
+ rm $(TESTDATA_DIR)/*.json
+ cp $(TEST_PROTO_DIR)/*.json $(TESTDATA_DIR)/
diff --git a/Makefile_v1beta1 b/Makefile_v1beta1
deleted file mode 100644
index 69cf87f41a..0000000000
--- a/Makefile_v1beta1
+++ /dev/null
@@ -1,37 +0,0 @@
-# This makefile builds the protos needed for cross-language Firestore tests.
-
-# Assume protoc is on the path. The proto compiler must be one that
-# supports proto3 syntax.
-PROTOC = protoc
-
-# Dependent repos.
-REPO_DIR = $(HOME)/git-repos
-PROTOBUF_REPO = $(REPO_DIR)/protobuf
-GOOGLEAPIS_REPO = $(REPO_DIR)/googleapis
-TESTS_REPO = $(REPO_DIR)/gcp/google-cloud-common
-
-TMPDIR = /tmp/python-fs-proto
-TMPDIR_FS = $(TMPDIR)/google/cloud/firestore_v1beta1/proto
-
-.PHONY: sync-protos gen-protos
-
-gen-protos: sync-protos tweak-protos
- # TODO(jba): Put the generated proto somewhere more suitable.
- $(PROTOC) --python_out=google/cloud/firestore_v1beta1/proto \
- -I $(TMPDIR) \
- -I $(PROTOBUF_REPO)/src \
- -I $(GOOGLEAPIS_REPO) \
- $(TMPDIR)/test_v1beta1.proto
-
-tweak-protos:
- mkdir -p $(TMPDIR_FS)
- cp $(GOOGLEAPIS_REPO)/google/firestore/v1beta1/*.proto $(TMPDIR_FS)
- sed -i -e 's@google/firestore/v1beta1@google/cloud/firestore_v1beta1/proto@' $(TMPDIR_FS)/*.proto
- cp $(TESTS_REPO)/testing/firestore/proto/test_v1beta1.proto $(TMPDIR)
- sed -i -e 's@package tests@package tests.v1beta1@' $(TMPDIR)/test_v1beta1.proto
- sed -i -e 's@google/firestore/v1beta1@google/cloud/firestore_v1beta1/proto@' $(TMPDIR)/test_v1beta1.proto
-
-sync-protos:
- cd $(PROTOBUF_REPO); git pull
- cd $(GOOGLEAPIS_REPO); git pull
- #cd $(TESTS_REPO); git pull
diff --git a/README.rst b/README.rst
index e2b9a90af8..a36648f7ff 100644
--- a/README.rst
+++ b/README.rst
@@ -55,12 +55,14 @@ dependencies.
Supported Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^
-Python >= 3.5
+Python >= 3.6
Deprecated Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^^
-Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+Python == 2.7.
+The last version of this library compatible with Python 2.7 is
+google-cloud-firestore==1.9.0.
Mac/Linux
^^^^^^^^^
diff --git a/UPGRADING.md b/UPGRADING.md
new file mode 100644
index 0000000000..a213b8013a
--- /dev/null
+++ b/UPGRADING.md
@@ -0,0 +1,134 @@
+# 2.0.0 Migration Guide
+
+The 2.0 release of the `google-cloud-firestore` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library may require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage.
+
+If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-firestore/issues).
+
+## Supported Python Versions
+
+> **WARNING**: Breaking change
+
+The 2.0.0 release requires Python 3.6+.
+
+
+## Method Calls
+
+> **WARNING**: Breaking change
+
+If you previously were using modules or functions under the namespace
+`google.cloud.firestore_v1.gapic` there is a high likelihood you have incompatible code.
+To assist with this, we have included some helpful scripts to make some of the code
+modifications required to use 2.0.0.
+
+* Install the library
+
+```py
+python3 -m pip install google-cloud-firestore
+```
+
+* The scripts `fixup_firestore_v1_keywords.py` and `fixup_firestore_admin_v1_keywords.py`
+are shipped with the library. They expect an input directory (with the code to convert)
+and an empty destination directory.
+
+```sh
+$ fixup_firestore_v1_keywords.py --input-directory .samples/ --output-directory samples/
+$ fixup_firestore_admin_v1_keywords.py --input-directory .samples/ --output-directory samples/
+```
+
+### More Details
+
+In `google-cloud-firestore<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters.
+
+**Before:**
+```py
+ def a_method(
+ self,
+ param1,
+ param2,
+ param3,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+```
+
+In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional.
+
+Some methods have additional keyword only parameters. The available parameters depend on the `google.api.method_signature` annotation specified by the API producer.
+
+
+**After:**
+```py
+ def a_method(
+ self,
+ request: RequestType = None,
+        *,
+ param1,
+ param2,
+ param3,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+```
+
+> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive.
+> Passing both will result in an error.
+
+
+Both of these calls are valid:
+
+```py
+response = client.a_method(
+ request={
+ "param1": param1,
+ "param2": param2,
+ "param3": param3
+ }
+)
+```
+
+```py
+response = client.a_method(
+ param1=param1,
+ param2=param2,
+ param3=param3
+)
+```
+
+This call is invalid because it mixes `request` with a keyword argument `param2`. Executing this code
+will result in an error.
+
+```py
+response = client.a_method(
+ request={
+ "param1": param1,
+ "param2": param2
+ },
+ param2=param2
+)
+```
+
+
+
+## Enums and Types
+
+
+> **WARNING**: Breaking change
+
+The `enums` submodule has been removed.
+
+**Before:**
+```py
+from google.cloud import firestore_v1
+
+direction = firestore_v1.enums.StructuredQuery.Direction.ASCENDING
+```
+
+
+**After:**
+```py
+from google.cloud import firestore_v1
+
+direction = firestore_v1.types.StructuredQuery.Direction.ASCENDING
+```
diff --git a/docs/UPGRADING.md b/docs/UPGRADING.md
new file mode 100644
index 0000000000..6dfcf4aedb
--- /dev/null
+++ b/docs/UPGRADING.md
@@ -0,0 +1,134 @@
+# 2.0.0 Migration Guide
+
+The 2.0 release of the `google-cloud-firestore` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library may require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage.
+
+If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-firestore/issues).
+
+## Supported Python Versions
+
+> **WARNING**: Breaking change
+
+The 2.0.0 release requires Python 3.6+.
+
+
+## Method Calls
+
+> **WARNING**: Breaking change
+
+If you previously were using modules or functions under the namespace
+`google.cloud.firestore_v1.gapic` there is a high likelihood you have incompatible code.
+To assist with this, we have included some helpful scripts to make some of the code
+modifications required to use 2.0.0.
+
+* Install the library
+
+```py
+python3 -m pip install google-cloud-firestore
+```
+
+* The scripts `fixup_firestore_v1_keywords.py` and `fixup_firestore_admin_v1_keywords.py`
+are shipped with the library. They expect an input directory (with the code to convert)
+and an empty destination directory.
+
+```sh
+$ fixup_firestore_v1_keywords.py --input-directory .samples/ --output-directory samples/
+$ fixup_firestore_admin_v1_keywords.py --input-directory .samples/ --output-directory samples/
+```
+
+### More Details
+
+In `google-cloud-firestore<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters.
+
+**Before:**
+```py
+ def a_method(
+ self,
+ param1,
+ param2,
+ param3,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+```
+
+In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional.
+
+Some methods have additional keyword only parameters. The available parameters depend on the `google.api.method_signature` annotation specified by the API producer.
+
+
+**After:**
+```py
+ def a_method(
+ self,
+ request: RequestType = None,
+        *,
+ param1,
+ param2,
+ param3,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+```
+
+> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive.
+> Passing both will result in an error.
+
+
+Both of these calls are valid:
+
+```py
+response = client.a_method(
+ request={
+ "param1": param1,
+ "param2": param2,
+ "param3": param3
+ }
+)
+```
+
+```py
+response = client.a_method(
+ param1=param1,
+ param2=param2,
+ param3=param3
+)
+```
+
+This call is invalid because it mixes `request` with a keyword argument `param2`. Executing this code
+will result in an error.
+
+```py
+response = client.a_method(
+ request={
+ "param1": param1,
+ "param2": param2
+ },
+ param2=param2
+)
+```
+
+
+
+## Enums and Types
+
+
+> **WARNING**: Breaking change
+
+The `enums` submodule has been removed.
+
+**Before:**
+```py
+from google.cloud import firestore_v1
+
+direction = firestore_v1.enums.StructuredQuery.Direction.ASCENDING
+```
+
+
+**After:**
+```py
+from google.cloud import firestore_v1
+
+direction = firestore_v1.types.StructuredQuery.Direction.ASCENDING
+```
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
index 228529efe2..6316a537f7 100644
--- a/docs/_templates/layout.html
+++ b/docs/_templates/layout.html
@@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version.
- Previously released library versions will continue to be available. For more information please
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+ Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
{% block body %} {% endblock %}
diff --git a/docs/admin_client.rst b/docs/admin_client.rst
new file mode 100644
index 0000000000..01f02db5d0
--- /dev/null
+++ b/docs/admin_client.rst
@@ -0,0 +1,6 @@
+Firestore Admin Client
+~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: google.cloud.firestore_admin_v1.services.firestore_admin.client
+ :members:
+ :show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
index 5a50b3c58f..742217c2a4 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -20,12 +20,16 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
+# For plugins that can not read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -35,24 +39,22 @@
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
+ "sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
+ "recommonmark",
]
# autodoc/autosummary flags
autoclass_content = "both"
-autodoc_default_flags = ["members"]
+autodoc_default_options = {"members": True}
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
-# Allow markdown includes (so releases.md can include CHANGLEOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
@@ -93,7 +95,12 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
+exclude_patterns = [
+ "_build",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/snippets/README.rst",
+]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -340,7 +347,7 @@
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
+ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.io/grpc/python/", None),
}
diff --git a/docs/index.rst b/docs/index.rst
index b8157df9bd..34002786f1 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,12 +1,6 @@
.. include:: README.rst
-.. note::
-
- Because the firestore client uses :mod:`grpcio` library, it is safe to
- share instances across threads. In multiprocessing scenarios, the best
- practice is to create client instances *after* the invocation of
- :func:`os.fork` by :class:`multiprocessing.Pool` or
- :class:`multiprocessing.Process`.
+.. include:: multiprocessing.rst
API Reference
-------------
@@ -23,7 +17,17 @@ API Reference
transaction
transforms
types
+ admin_client
+
+Migration Guide
+---------------
+
+See the guide below for instructions on migrating to the 2.x release of this library.
+
+.. toctree::
+ :maxdepth: 2
+ UPGRADING
Changelog
---------
diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst
new file mode 100644
index 0000000000..1cb29d4ca9
--- /dev/null
+++ b/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+ Because this client uses :mod:`grpcio` library, it is safe to
+ share instances across threads. In multiprocessing scenarios, the best
+ practice is to create client instances *after* the invocation of
+ :func:`os.fork` by :class:`multiprocessing.Pool` or
+ :class:`multiprocessing.Process`.
diff --git a/google/cloud/firestore.py b/google/cloud/firestore.py
index 3bdb9af565..f80d62c090 100644
--- a/google/cloud/firestore.py
+++ b/google/cloud/firestore.py
@@ -18,12 +18,20 @@
from google.cloud.firestore_v1 import __version__
from google.cloud.firestore_v1 import ArrayRemove
from google.cloud.firestore_v1 import ArrayUnion
+from google.cloud.firestore_v1 import AsyncClient
+from google.cloud.firestore_v1 import AsyncCollectionReference
+from google.cloud.firestore_v1 import AsyncDocumentReference
+from google.cloud.firestore_v1 import AsyncQuery
+from google.cloud.firestore_v1 import async_transactional
+from google.cloud.firestore_v1 import AsyncTransaction
+from google.cloud.firestore_v1 import AsyncWriteBatch
from google.cloud.firestore_v1 import Client
+from google.cloud.firestore_v1 import CollectionGroup
from google.cloud.firestore_v1 import CollectionReference
from google.cloud.firestore_v1 import DELETE_FIELD
from google.cloud.firestore_v1 import DocumentReference
from google.cloud.firestore_v1 import DocumentSnapshot
-from google.cloud.firestore_v1 import enums
+from google.cloud.firestore_v1 import DocumentTransform
from google.cloud.firestore_v1 import ExistsOption
from google.cloud.firestore_v1 import GeoPoint
from google.cloud.firestore_v1 import Increment
@@ -39,18 +47,27 @@
from google.cloud.firestore_v1 import Watch
from google.cloud.firestore_v1 import WriteBatch
from google.cloud.firestore_v1 import WriteOption
+from typing import List
-__all__ = [
+__all__: List[str] = [
"__version__",
"ArrayRemove",
"ArrayUnion",
+ "AsyncClient",
+ "AsyncCollectionReference",
+ "AsyncDocumentReference",
+ "AsyncQuery",
+ "async_transactional",
+ "AsyncTransaction",
+ "AsyncWriteBatch",
"Client",
+ "CollectionGroup",
"CollectionReference",
"DELETE_FIELD",
"DocumentReference",
"DocumentSnapshot",
- "enums",
+ "DocumentTransform",
"ExistsOption",
"GeoPoint",
"Increment",
diff --git a/google/cloud/firestore_admin_v1/__init__.py b/google/cloud/firestore_admin_v1/__init__.py
index 23f844b617..8c74777216 100644
--- a/google/cloud/firestore_admin_v1/__init__.py
+++ b/google/cloud/firestore_admin_v1/__init__.py
@@ -1,41 +1,65 @@
# -*- coding: utf-8 -*-
-#
+
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
-
-from __future__ import absolute_import
-import sys
-import warnings
-
-from google.cloud.firestore_admin_v1 import types
-from google.cloud.firestore_admin_v1.gapic import enums
-from google.cloud.firestore_admin_v1.gapic import firestore_admin_client
-
-
-if sys.version_info[:2] == (2, 7):
- message = (
- "A future version of this library will drop support for Python 2.7."
- "More details about Python 2 support for Google Cloud Client Libraries"
- "can be found at https://cloud.google.com/python/docs/python2-sunset/"
- )
- warnings.warn(message, DeprecationWarning)
-
-
-class FirestoreAdminClient(firestore_admin_client.FirestoreAdminClient):
- __doc__ = firestore_admin_client.FirestoreAdminClient.__doc__
- enums = enums
+from .services.firestore_admin import FirestoreAdminClient
+from .types.field import Field
+from .types.firestore_admin import CreateIndexRequest
+from .types.firestore_admin import DeleteIndexRequest
+from .types.firestore_admin import ExportDocumentsRequest
+from .types.firestore_admin import GetFieldRequest
+from .types.firestore_admin import GetIndexRequest
+from .types.firestore_admin import ImportDocumentsRequest
+from .types.firestore_admin import ListFieldsRequest
+from .types.firestore_admin import ListFieldsResponse
+from .types.firestore_admin import ListIndexesRequest
+from .types.firestore_admin import ListIndexesResponse
+from .types.firestore_admin import UpdateFieldRequest
+from .types.index import Index
+from .types.location import LocationMetadata
+from .types.operation import ExportDocumentsMetadata
+from .types.operation import ExportDocumentsResponse
+from .types.operation import FieldOperationMetadata
+from .types.operation import ImportDocumentsMetadata
+from .types.operation import IndexOperationMetadata
+from .types.operation import OperationState
+from .types.operation import Progress
-__all__ = ("enums", "types", "FirestoreAdminClient")
+__all__ = (
+ "CreateIndexRequest",
+ "DeleteIndexRequest",
+ "ExportDocumentsMetadata",
+ "ExportDocumentsRequest",
+ "ExportDocumentsResponse",
+ "Field",
+ "FieldOperationMetadata",
+ "GetFieldRequest",
+ "GetIndexRequest",
+ "ImportDocumentsMetadata",
+ "ImportDocumentsRequest",
+ "Index",
+ "IndexOperationMetadata",
+ "ListFieldsRequest",
+ "ListFieldsResponse",
+ "ListIndexesRequest",
+ "ListIndexesResponse",
+ "LocationMetadata",
+ "OperationState",
+ "Progress",
+ "UpdateFieldRequest",
+ "FirestoreAdminClient",
+)
diff --git a/google/cloud/firestore_admin_v1/gapic/enums.py b/google/cloud/firestore_admin_v1/gapic/enums.py
deleted file mode 100644
index 09acf6c3ef..0000000000
--- a/google/cloud/firestore_admin_v1/gapic/enums.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Wrappers for protocol buffer enum types."""
-
-import enum
-
-
-class OperationState(enum.IntEnum):
- """
- Describes the state of the operation.
-
- Attributes:
- OPERATION_STATE_UNSPECIFIED (int): Unspecified.
- INITIALIZING (int): Request is being prepared for processing.
- PROCESSING (int): Request is actively being processed.
- CANCELLING (int): Request is in the process of being cancelled after user called
- google.longrunning.Operations.CancelOperation on the operation.
- FINALIZING (int): Request has been processed and is in its finalization stage.
- SUCCESSFUL (int): Request has completed successfully.
- FAILED (int): Request has finished being processed, but encountered an error.
- CANCELLED (int): Request has finished being cancelled after user called
- google.longrunning.Operations.CancelOperation.
- """
-
- OPERATION_STATE_UNSPECIFIED = 0
- INITIALIZING = 1
- PROCESSING = 2
- CANCELLING = 3
- FINALIZING = 4
- SUCCESSFUL = 5
- FAILED = 6
- CANCELLED = 7
-
-
-class FieldOperationMetadata(object):
- class IndexConfigDelta(object):
- class ChangeType(enum.IntEnum):
- """
- Specifies how the index is changing.
-
- Attributes:
- CHANGE_TYPE_UNSPECIFIED (int): The type of change is not specified or known.
- ADD (int): The single field index is being added.
- REMOVE (int): The single field index is being removed.
- """
-
- CHANGE_TYPE_UNSPECIFIED = 0
- ADD = 1
- REMOVE = 2
-
-
-class Index(object):
- class QueryScope(enum.IntEnum):
- """
- Query Scope defines the scope at which a query is run. This is specified
- on a StructuredQuery's ``from`` field.
-
- Attributes:
- QUERY_SCOPE_UNSPECIFIED (int): The query scope is unspecified. Not a valid option.
- COLLECTION (int): Indexes with a collection query scope specified allow queries
- against a collection that is the child of a specific document, specified
- at query time, and that has the collection id specified by the index.
- COLLECTION_GROUP (int): Indexes with a collection group query scope specified allow queries
- against all collections that has the collection id specified by the
- index.
- """
-
- QUERY_SCOPE_UNSPECIFIED = 0
- COLLECTION = 1
- COLLECTION_GROUP = 2
-
- class State(enum.IntEnum):
- """
- The state of an index. During index creation, an index will be in the
- ``CREATING`` state. If the index is created successfully, it will
- transition to the ``READY`` state. If the index creation encounters a
- problem, the index will transition to the ``NEEDS_REPAIR`` state.
-
- Attributes:
- STATE_UNSPECIFIED (int): The state is unspecified.
- CREATING (int): The index is being created.
- There is an active long-running operation for the index.
- The index is updated when writing a document.
- Some index data may exist.
- READY (int): The index is ready to be used.
- The index is updated when writing a document.
- The index is fully populated from all stored documents it applies to.
- NEEDS_REPAIR (int): The index was being created, but something went wrong.
- There is no active long-running operation for the index,
- and the most recently finished long-running operation failed.
- The index is not updated when writing a document.
- Some index data may exist.
- Use the google.longrunning.Operations API to determine why the operation
- that last attempted to create this index failed, then re-create the
- index.
- """
-
- STATE_UNSPECIFIED = 0
- CREATING = 1
- READY = 2
- NEEDS_REPAIR = 3
-
- class IndexField(object):
- class ArrayConfig(enum.IntEnum):
- """
- The supported array value configurations.
-
- Attributes:
- ARRAY_CONFIG_UNSPECIFIED (int): The index does not support additional array queries.
- CONTAINS (int): The index supports array containment queries.
- """
-
- ARRAY_CONFIG_UNSPECIFIED = 0
- CONTAINS = 1
-
- class Order(enum.IntEnum):
- """
- The supported orderings.
-
- Attributes:
- ORDER_UNSPECIFIED (int): The ordering is unspecified. Not a valid option.
- ASCENDING (int): The field is ordered by ascending field value.
- DESCENDING (int): The field is ordered by descending field value.
- """
-
- ORDER_UNSPECIFIED = 0
- ASCENDING = 1
- DESCENDING = 2
diff --git a/google/cloud/firestore_admin_v1/gapic/firestore_admin_client.py b/google/cloud/firestore_admin_v1/gapic/firestore_admin_client.py
deleted file mode 100644
index 9b80814f9f..0000000000
--- a/google/cloud/firestore_admin_v1/gapic/firestore_admin_client.py
+++ /dev/null
@@ -1,1016 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Accesses the google.firestore.admin.v1 FirestoreAdmin API."""
-
-import functools
-import pkg_resources
-import warnings
-
-from google.oauth2 import service_account
-import google.api_core.client_options
-import google.api_core.gapic_v1.client_info
-import google.api_core.gapic_v1.config
-import google.api_core.gapic_v1.method
-import google.api_core.gapic_v1.routing_header
-import google.api_core.grpc_helpers
-import google.api_core.page_iterator
-import google.api_core.path_template
-import grpc
-
-from google.cloud.firestore_admin_v1.gapic import enums
-from google.cloud.firestore_admin_v1.gapic import firestore_admin_client_config
-from google.cloud.firestore_admin_v1.gapic.transports import (
- firestore_admin_grpc_transport,
-)
-from google.cloud.firestore_admin_v1.proto import field_pb2
-from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2
-from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2_grpc
-from google.cloud.firestore_admin_v1.proto import index_pb2
-from google.longrunning import operations_pb2
-from google.protobuf import empty_pb2
-from google.protobuf import field_mask_pb2
-
-
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
- "google-cloud-firestore"
-).version
-
-
-class FirestoreAdminClient(object):
- """
- Operations are created by service ``FirestoreAdmin``, but are accessed
- via service ``google.longrunning.Operations``.
- """
-
- SERVICE_ADDRESS = "firestore.googleapis.com:443"
- """The default address of the service."""
-
- # The name of the interface for this client. This is the key used to
- # find the method configuration in the client_config dictionary.
- _INTERFACE_NAME = "google.firestore.admin.v1.FirestoreAdmin"
-
- @classmethod
- def from_service_account_file(cls, filename, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- FirestoreAdminClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- @classmethod
- def database_path(cls, project, database):
- """Return a fully-qualified database string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}",
- project=project,
- database=database,
- )
-
- @classmethod
- def field_path(cls, project, database, collection_id, field_id):
- """Return a fully-qualified field string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}/collectionGroups/{collection_id}/fields/{field_id}",
- project=project,
- database=database,
- collection_id=collection_id,
- field_id=field_id,
- )
-
- @classmethod
- def index_path(cls, project, database, collection_id, index_id):
- """Return a fully-qualified index string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}/collectionGroups/{collection_id}/indexes/{index_id}",
- project=project,
- database=database,
- collection_id=collection_id,
- index_id=index_id,
- )
-
- @classmethod
- def parent_path(cls, project, database, collection_id):
- """Return a fully-qualified parent string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}/collectionGroups/{collection_id}",
- project=project,
- database=database,
- collection_id=collection_id,
- )
-
- def __init__(
- self,
- transport=None,
- channel=None,
- credentials=None,
- client_config=None,
- client_info=None,
- client_options=None,
- ):
- """Constructor.
-
- Args:
- transport (Union[~.FirestoreAdminGrpcTransport,
- Callable[[~.Credentials, type], ~.FirestoreAdminGrpcTransport]): A transport
- instance, responsible for actually making the API calls.
- The default transport uses the gRPC protocol.
- This argument may also be a callable which returns a
- transport instance. Callables will be sent the credentials
- as the first argument and the default transport class as
- the second argument.
- channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
- through which to make calls. This argument is mutually exclusive
- with ``credentials``; providing both will raise an exception.
- credentials (google.auth.credentials.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is mutually exclusive with providing a
- transport instance to ``transport``; doing so will raise
- an exception.
- client_config (dict): DEPRECATED. A dictionary of call options for
- each method. If not specified, the default configuration is used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- client_options (Union[dict, google.api_core.client_options.ClientOptions]):
- Client options used to set user options on the client. API Endpoint
- should be set through client_options.
- """
- # Raise deprecation warnings for things we want to go away.
- if client_config is not None:
- warnings.warn(
- "The `client_config` argument is deprecated.",
- PendingDeprecationWarning,
- stacklevel=2,
- )
- else:
- client_config = firestore_admin_client_config.config
-
- if channel:
- warnings.warn(
- "The `channel` argument is deprecated; use " "`transport` instead.",
- PendingDeprecationWarning,
- stacklevel=2,
- )
-
- api_endpoint = self.SERVICE_ADDRESS
- if client_options:
- if type(client_options) == dict:
- client_options = google.api_core.client_options.from_dict(
- client_options
- )
- if client_options.api_endpoint:
- api_endpoint = client_options.api_endpoint
-
- # Instantiate the transport.
- # The transport is responsible for handling serialization and
- # deserialization and actually sending data to the service.
- if transport:
- if callable(transport):
- self.transport = transport(
- credentials=credentials,
- default_class=firestore_admin_grpc_transport.FirestoreAdminGrpcTransport,
- address=api_endpoint,
- )
- else:
- if credentials:
- raise ValueError(
- "Received both a transport instance and "
- "credentials; these are mutually exclusive."
- )
- self.transport = transport
- else:
- self.transport = firestore_admin_grpc_transport.FirestoreAdminGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
- )
-
- if client_info is None:
- client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
- )
- else:
- client_info.gapic_version = _GAPIC_LIBRARY_VERSION
- self._client_info = client_info
-
- # Parse out the default settings for retry and timeout for each RPC
- # from the client configuration.
- # (Ordinarily, these are the defaults specified in the `*_config.py`
- # file next to this one.)
- self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
- )
-
- # Save a dictionary of cached API call functions.
- # These are the actual callables which invoke the proper
- # transport methods, wrapped with `wrap_method` to add retry,
- # timeout, and the like.
- self._inner_api_calls = {}
-
- # Service calls
- def create_index(
- self,
- parent,
- index,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Creates a composite index. This returns a
- ``google.longrunning.Operation`` which may be used to track the status
- of the creation. The metadata for the operation will be the type
- ``IndexOperationMetadata``.
-
- Example:
- >>> from google.cloud import firestore_admin_v1
- >>>
- >>> client = firestore_admin_v1.FirestoreAdminClient()
- >>>
- >>> parent = client.parent_path('[PROJECT]', '[DATABASE]', '[COLLECTION_ID]')
- >>>
- >>> # TODO: Initialize `index`:
- >>> index = {}
- >>>
- >>> response = client.create_index(parent, index)
-
- Args:
- parent (str): Required. A parent name of the form
- ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
- index (Union[dict, ~google.cloud.firestore_admin_v1.types.Index]): Required. The composite index to create.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_admin_v1.types.Index`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_admin_v1.types.Operation` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "create_index" not in self._inner_api_calls:
- self._inner_api_calls[
- "create_index"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.create_index,
- default_retry=self._method_configs["CreateIndex"].retry,
- default_timeout=self._method_configs["CreateIndex"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_admin_pb2.CreateIndexRequest(parent=parent, index=index)
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["create_index"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def list_indexes(
- self,
- parent,
- filter_=None,
- page_size=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Lists composite indexes.
-
- Example:
- >>> from google.cloud import firestore_admin_v1
- >>>
- >>> client = firestore_admin_v1.FirestoreAdminClient()
- >>>
- >>> parent = client.parent_path('[PROJECT]', '[DATABASE]', '[COLLECTION_ID]')
- >>>
- >>> # Iterate over all results
- >>> for element in client.list_indexes(parent):
- ... # process element
- ... pass
- >>>
- >>>
- >>> # Alternatively:
- >>>
- >>> # Iterate over results one page at a time
- >>> for page in client.list_indexes(parent).pages:
- ... for element in page:
- ... # process element
- ... pass
-
- Args:
- parent (str): Required. A parent name of the form
- ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
- filter_ (str): The filter to apply to list results.
- page_size (int): The maximum number of resources contained in the
- underlying API response. If page streaming is performed per-
- resource, this parameter does not affect the return value. If page
- streaming is performed per-page, this determines the maximum number
- of resources in a page.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.api_core.page_iterator.PageIterator` instance.
- An iterable of :class:`~google.cloud.firestore_admin_v1.types.Index` instances.
- You can also iterate over the pages of the response
- using its `pages` property.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "list_indexes" not in self._inner_api_calls:
- self._inner_api_calls[
- "list_indexes"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.list_indexes,
- default_retry=self._method_configs["ListIndexes"].retry,
- default_timeout=self._method_configs["ListIndexes"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_admin_pb2.ListIndexesRequest(
- parent=parent, filter=filter_, page_size=page_size
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- iterator = google.api_core.page_iterator.GRPCIterator(
- client=None,
- method=functools.partial(
- self._inner_api_calls["list_indexes"],
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- ),
- request=request,
- items_field="indexes",
- request_token_field="page_token",
- response_token_field="next_page_token",
- )
- return iterator
-
- def get_index(
- self,
- name,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Gets a composite index.
-
- Example:
- >>> from google.cloud import firestore_admin_v1
- >>>
- >>> client = firestore_admin_v1.FirestoreAdminClient()
- >>>
- >>> name = client.index_path('[PROJECT]', '[DATABASE]', '[COLLECTION_ID]', '[INDEX_ID]')
- >>>
- >>> response = client.get_index(name)
-
- Args:
- name (str): Required. A name of the form
- ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_admin_v1.types.Index` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "get_index" not in self._inner_api_calls:
- self._inner_api_calls[
- "get_index"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.get_index,
- default_retry=self._method_configs["GetIndex"].retry,
- default_timeout=self._method_configs["GetIndex"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_admin_pb2.GetIndexRequest(name=name)
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["get_index"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def delete_index(
- self,
- name,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Deletes a composite index.
-
- Example:
- >>> from google.cloud import firestore_admin_v1
- >>>
- >>> client = firestore_admin_v1.FirestoreAdminClient()
- >>>
- >>> name = client.index_path('[PROJECT]', '[DATABASE]', '[COLLECTION_ID]', '[INDEX_ID]')
- >>>
- >>> client.delete_index(name)
-
- Args:
- name (str): Required. A name of the form
- ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "delete_index" not in self._inner_api_calls:
- self._inner_api_calls[
- "delete_index"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.delete_index,
- default_retry=self._method_configs["DeleteIndex"].retry,
- default_timeout=self._method_configs["DeleteIndex"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_admin_pb2.DeleteIndexRequest(name=name)
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- self._inner_api_calls["delete_index"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def import_documents(
- self,
- name,
- collection_ids=None,
- input_uri_prefix=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Imports documents into Google Cloud Firestore. Existing documents with the
- same name are overwritten. The import occurs in the background and its
- progress can be monitored and managed via the Operation resource that is
- created. If an ImportDocuments operation is cancelled, it is possible
- that a subset of the data has already been imported to Cloud Firestore.
-
- Example:
- >>> from google.cloud import firestore_admin_v1
- >>>
- >>> client = firestore_admin_v1.FirestoreAdminClient()
- >>>
- >>> name = client.database_path('[PROJECT]', '[DATABASE]')
- >>>
- >>> response = client.import_documents(name)
-
- Args:
- name (str): Required. Database to import into. Should be of the form:
- ``projects/{project_id}/databases/{database_id}``.
- collection_ids (list[str]): Which collection ids to import. Unspecified means all collections included
- in the import.
- input_uri_prefix (str): Location of the exported files. This must match the output\_uri\_prefix
- of an ExportDocumentsResponse from an export that has completed
- successfully. See:
- ``google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix``.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_admin_v1.types.Operation` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "import_documents" not in self._inner_api_calls:
- self._inner_api_calls[
- "import_documents"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.import_documents,
- default_retry=self._method_configs["ImportDocuments"].retry,
- default_timeout=self._method_configs["ImportDocuments"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_admin_pb2.ImportDocumentsRequest(
- name=name, collection_ids=collection_ids, input_uri_prefix=input_uri_prefix
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["import_documents"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def export_documents(
- self,
- name,
- collection_ids=None,
- output_uri_prefix=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Exports a copy of all or a subset of documents from Google Cloud Firestore
- to another storage system, such as Google Cloud Storage. Recent updates to
- documents may not be reflected in the export. The export occurs in the
- background and its progress can be monitored and managed via the
- Operation resource that is created. The output of an export may only be
- used once the associated operation is done. If an export operation is
- cancelled before completion it may leave partial data behind in Google
- Cloud Storage.
-
- Example:
- >>> from google.cloud import firestore_admin_v1
- >>>
- >>> client = firestore_admin_v1.FirestoreAdminClient()
- >>>
- >>> name = client.database_path('[PROJECT]', '[DATABASE]')
- >>>
- >>> response = client.export_documents(name)
-
- Args:
- name (str): Required. Database to export. Should be of the form:
- ``projects/{project_id}/databases/{database_id}``.
- collection_ids (list[str]): Which collection ids to export. Unspecified means all collections.
- output_uri_prefix (str): The output URI. Currently only supports Google Cloud Storage URIs of the
- form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where ``BUCKET_NAME`` is
- the name of the Google Cloud Storage bucket and ``NAMESPACE_PATH`` is an
- optional Google Cloud Storage namespace path. When choosing a name, be
- sure to consider Google Cloud Storage naming guidelines:
- https://cloud.google.com/storage/docs/naming. If the URI is a bucket
- (without a namespace path), a prefix will be generated based on the
- start time.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_admin_v1.types.Operation` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "export_documents" not in self._inner_api_calls:
- self._inner_api_calls[
- "export_documents"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.export_documents,
- default_retry=self._method_configs["ExportDocuments"].retry,
- default_timeout=self._method_configs["ExportDocuments"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_admin_pb2.ExportDocumentsRequest(
- name=name,
- collection_ids=collection_ids,
- output_uri_prefix=output_uri_prefix,
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["export_documents"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def get_field(
- self,
- name,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Gets the metadata and configuration for a Field.
-
- Example:
- >>> from google.cloud import firestore_admin_v1
- >>>
- >>> client = firestore_admin_v1.FirestoreAdminClient()
- >>>
- >>> name = client.field_path('[PROJECT]', '[DATABASE]', '[COLLECTION_ID]', '[FIELD_ID]')
- >>>
- >>> response = client.get_field(name)
-
- Args:
- name (str): Required. A name of the form
- ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}``
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_admin_v1.types.Field` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "get_field" not in self._inner_api_calls:
- self._inner_api_calls[
- "get_field"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.get_field,
- default_retry=self._method_configs["GetField"].retry,
- default_timeout=self._method_configs["GetField"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_admin_pb2.GetFieldRequest(name=name)
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["get_field"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def list_fields(
- self,
- parent,
- filter_=None,
- page_size=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Lists the field configuration and metadata for this database.
-
- Currently, ``FirestoreAdmin.ListFields`` only supports listing fields
- that have been explicitly overridden. To issue this query, call
- ``FirestoreAdmin.ListFields`` with the filter set to
- ``indexConfig.usesAncestorConfig:false``.
-
- Example:
- >>> from google.cloud import firestore_admin_v1
- >>>
- >>> client = firestore_admin_v1.FirestoreAdminClient()
- >>>
- >>> parent = client.parent_path('[PROJECT]', '[DATABASE]', '[COLLECTION_ID]')
- >>>
- >>> # Iterate over all results
- >>> for element in client.list_fields(parent):
- ... # process element
- ... pass
- >>>
- >>>
- >>> # Alternatively:
- >>>
- >>> # Iterate over results one page at a time
- >>> for page in client.list_fields(parent).pages:
- ... for element in page:
- ... # process element
- ... pass
-
- Args:
- parent (str): Required. A parent name of the form
- ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
- filter_ (str): The filter to apply to list results. Currently,
- ``FirestoreAdmin.ListFields`` only supports listing fields that have
- been explicitly overridden. To issue this query, call
- ``FirestoreAdmin.ListFields`` with the filter set to
- ``indexConfig.usesAncestorConfig:false``.
- page_size (int): The maximum number of resources contained in the
- underlying API response. If page streaming is performed per-
- resource, this parameter does not affect the return value. If page
- streaming is performed per-page, this determines the maximum number
- of resources in a page.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.api_core.page_iterator.PageIterator` instance.
- An iterable of :class:`~google.cloud.firestore_admin_v1.types.Field` instances.
- You can also iterate over the pages of the response
- using its `pages` property.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "list_fields" not in self._inner_api_calls:
- self._inner_api_calls[
- "list_fields"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.list_fields,
- default_retry=self._method_configs["ListFields"].retry,
- default_timeout=self._method_configs["ListFields"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_admin_pb2.ListFieldsRequest(
- parent=parent, filter=filter_, page_size=page_size
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- iterator = google.api_core.page_iterator.GRPCIterator(
- client=None,
- method=functools.partial(
- self._inner_api_calls["list_fields"],
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- ),
- request=request,
- items_field="fields",
- request_token_field="page_token",
- response_token_field="next_page_token",
- )
- return iterator
-
- def update_field(
- self,
- field,
- update_mask=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Updates a field configuration. Currently, field updates apply only to
- single field index configuration. However, calls to
- ``FirestoreAdmin.UpdateField`` should provide a field mask to avoid
- changing any configuration that the caller isn't aware of. The field
- mask should be specified as: ``{ paths: "index_config" }``.
-
- This call returns a ``google.longrunning.Operation`` which may be used
- to track the status of the field update. The metadata for the operation
- will be the type ``FieldOperationMetadata``.
-
- To configure the default field settings for the database, use the
- special ``Field`` with resource name:
- ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.
-
- Example:
- >>> from google.cloud import firestore_admin_v1
- >>>
- >>> client = firestore_admin_v1.FirestoreAdminClient()
- >>>
- >>> # TODO: Initialize `field`:
- >>> field = {}
- >>>
- >>> response = client.update_field(field)
-
- Args:
- field (Union[dict, ~google.cloud.firestore_admin_v1.types.Field]): Required. The field to be updated.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_admin_v1.types.Field`
- update_mask (Union[dict, ~google.cloud.firestore_admin_v1.types.FieldMask]): A mask, relative to the field. If specified, only configuration
- specified by this field\_mask will be updated in the field.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_admin_v1.types.FieldMask`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_admin_v1.types.Operation` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "update_field" not in self._inner_api_calls:
- self._inner_api_calls[
- "update_field"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.update_field,
- default_retry=self._method_configs["UpdateField"].retry,
- default_timeout=self._method_configs["UpdateField"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_admin_pb2.UpdateFieldRequest(
- field=field, update_mask=update_mask
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("field.name", field.name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["update_field"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
diff --git a/google/cloud/firestore_admin_v1/gapic/firestore_admin_client_config.py b/google/cloud/firestore_admin_v1/gapic/firestore_admin_client_config.py
deleted file mode 100644
index f073ae4566..0000000000
--- a/google/cloud/firestore_admin_v1/gapic/firestore_admin_client_config.py
+++ /dev/null
@@ -1,68 +0,0 @@
-config = {
- "interfaces": {
- "google.firestore.admin.v1.FirestoreAdmin": {
- "retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
- "non_idempotent": [],
- },
- "retry_params": {
- "default": {
- "initial_retry_delay_millis": 100,
- "retry_delay_multiplier": 1.3,
- "max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 60000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 60000,
- "total_timeout_millis": 600000,
- }
- },
- "methods": {
- "CreateIndex": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "ListIndexes": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "GetIndex": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "DeleteIndex": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "ImportDocuments": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "ExportDocuments": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "GetField": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "ListFields": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "UpdateField": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- },
- }
- }
-}
diff --git a/google/cloud/firestore_admin_v1/gapic/transports/firestore_admin_grpc_transport.py b/google/cloud/firestore_admin_v1/gapic/transports/firestore_admin_grpc_transport.py
deleted file mode 100644
index f1bdc01711..0000000000
--- a/google/cloud/firestore_admin_v1/gapic/transports/firestore_admin_grpc_transport.py
+++ /dev/null
@@ -1,259 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import google.api_core.grpc_helpers
-
-from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2_grpc
-
-
-class FirestoreAdminGrpcTransport(object):
- """gRPC transport class providing stubs for
- google.firestore.admin.v1 FirestoreAdmin API.
-
- The transport provides access to the raw gRPC stubs,
- which can be used to take advantage of advanced
- features of gRPC.
- """
-
- # The scopes needed to make gRPC calls to all of the methods defined
- # in this service.
- _OAUTH_SCOPES = (
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/datastore",
- )
-
- def __init__(
- self, channel=None, credentials=None, address="firestore.googleapis.com:443"
- ):
- """Instantiate the transport class.
-
- Args:
- channel (grpc.Channel): A ``Channel`` instance through
- which to make calls. This argument is mutually exclusive
- with ``credentials``; providing both will raise an exception.
- credentials (google.auth.credentials.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- address (str): The address where the service is hosted.
- """
- # If both `channel` and `credentials` are specified, raise an
- # exception (channels come with credentials baked in already).
- if channel is not None and credentials is not None:
- raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
- )
-
- # Create the channel.
- if channel is None:
- channel = self.create_channel(
- address=address,
- credentials=credentials,
- options={
- "grpc.max_send_message_length": -1,
- "grpc.max_receive_message_length": -1,
- }.items(),
- )
-
- self._channel = channel
-
- # gRPC uses objects called "stubs" that are bound to the
- # channel and provide a basic method for each RPC.
- self._stubs = {
- "firestore_admin_stub": firestore_admin_pb2_grpc.FirestoreAdminStub(channel)
- }
-
- @classmethod
- def create_channel(
- cls, address="firestore.googleapis.com:443", credentials=None, **kwargs
- ):
- """Create and return a gRPC channel object.
-
- Args:
- address (str): The host for the channel to use.
- credentials (~.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- kwargs (dict): Keyword arguments, which are passed to the
- channel creation.
-
- Returns:
- grpc.Channel: A gRPC channel object.
- """
- return google.api_core.grpc_helpers.create_channel(
- address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
- )
-
- @property
- def channel(self):
- """The gRPC channel used by the transport.
-
- Returns:
- grpc.Channel: A gRPC channel object.
- """
- return self._channel
-
- @property
- def create_index(self):
- """Return the gRPC stub for :meth:`FirestoreAdminClient.create_index`.
-
- Creates a composite index. This returns a
- ``google.longrunning.Operation`` which may be used to track the status
- of the creation. The metadata for the operation will be the type
- ``IndexOperationMetadata``.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_admin_stub"].CreateIndex
-
- @property
- def list_indexes(self):
- """Return the gRPC stub for :meth:`FirestoreAdminClient.list_indexes`.
-
- Lists composite indexes.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_admin_stub"].ListIndexes
-
- @property
- def get_index(self):
- """Return the gRPC stub for :meth:`FirestoreAdminClient.get_index`.
-
- Gets a composite index.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_admin_stub"].GetIndex
-
- @property
- def delete_index(self):
- """Return the gRPC stub for :meth:`FirestoreAdminClient.delete_index`.
-
- Deletes a composite index.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_admin_stub"].DeleteIndex
-
- @property
- def import_documents(self):
- """Return the gRPC stub for :meth:`FirestoreAdminClient.import_documents`.
-
- Imports documents into Google Cloud Firestore. Existing documents with the
- same name are overwritten. The import occurs in the background and its
- progress can be monitored and managed via the Operation resource that is
- created. If an ImportDocuments operation is cancelled, it is possible
- that a subset of the data has already been imported to Cloud Firestore.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_admin_stub"].ImportDocuments
-
- @property
- def export_documents(self):
- """Return the gRPC stub for :meth:`FirestoreAdminClient.export_documents`.
-
- Exports a copy of all or a subset of documents from Google Cloud Firestore
- to another storage system, such as Google Cloud Storage. Recent updates to
- documents may not be reflected in the export. The export occurs in the
- background and its progress can be monitored and managed via the
- Operation resource that is created. The output of an export may only be
- used once the associated operation is done. If an export operation is
- cancelled before completion it may leave partial data behind in Google
- Cloud Storage.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_admin_stub"].ExportDocuments
-
- @property
- def get_field(self):
- """Return the gRPC stub for :meth:`FirestoreAdminClient.get_field`.
-
- Gets the metadata and configuration for a Field.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_admin_stub"].GetField
-
- @property
- def list_fields(self):
- """Return the gRPC stub for :meth:`FirestoreAdminClient.list_fields`.
-
- Lists the field configuration and metadata for this database.
-
- Currently, ``FirestoreAdmin.ListFields`` only supports listing fields
- that have been explicitly overridden. To issue this query, call
- ``FirestoreAdmin.ListFields`` with the filter set to
- ``indexConfig.usesAncestorConfig:false``.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_admin_stub"].ListFields
-
- @property
- def update_field(self):
- """Return the gRPC stub for :meth:`FirestoreAdminClient.update_field`.
-
- Updates a field configuration. Currently, field updates apply only to
- single field index configuration. However, calls to
- ``FirestoreAdmin.UpdateField`` should provide a field mask to avoid
- changing any configuration that the caller isn't aware of. The field
- mask should be specified as: ``{ paths: "index_config" }``.
-
- This call returns a ``google.longrunning.Operation`` which may be used
- to track the status of the field update. The metadata for the operation
- will be the type ``FieldOperationMetadata``.
-
- To configure the default field settings for the database, use the
- special ``Field`` with resource name:
- ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_admin_stub"].UpdateField
diff --git a/google/cloud/firestore_admin_v1/proto/__init__.py b/google/cloud/firestore_admin_v1/proto/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/google/cloud/firestore_admin_v1/proto/field.proto b/google/cloud/firestore_admin_v1/proto/field.proto
deleted file mode 100644
index 48430d87c1..0000000000
--- a/google/cloud/firestore_admin_v1/proto/field.proto
+++ /dev/null
@@ -1,99 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.admin.v1;
-
-import "google/api/resource.proto";
-import "google/firestore/admin/v1/index.proto";
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
-option java_multiple_files = true;
-option java_outer_classname = "FieldProto";
-option java_package = "com.google.firestore.admin.v1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
-
-// Represents a single field in the database.
-//
-// Fields are grouped by their "Collection Group", which represent all
-// collections in the database with the same id.
-message Field {
- option (google.api.resource) = {
- type: "firestore.googleapis.com/Field"
- pattern: "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}"
- };
-
- // The index configuration for this field.
- message IndexConfig {
- // The indexes supported for this field.
- repeated Index indexes = 1;
-
- // Output only. When true, the `Field`'s index configuration is set from the
- // configuration specified by the `ancestor_field`.
- // When false, the `Field`'s index configuration is defined explicitly.
- bool uses_ancestor_config = 2;
-
- // Output only. Specifies the resource name of the `Field` from which this field's
- // index configuration is set (when `uses_ancestor_config` is true),
- // or from which it *would* be set if this field had no index configuration
- // (when `uses_ancestor_config` is false).
- string ancestor_field = 3;
-
- // Output only
- // When true, the `Field`'s index configuration is in the process of being
- // reverted. Once complete, the index config will transition to the same
- // state as the field specified by `ancestor_field`, at which point
- // `uses_ancestor_config` will be `true` and `reverting` will be `false`.
- bool reverting = 4;
- }
-
- // A field name of the form
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
- //
- // A field path may be a simple field name, e.g. `address` or a path to fields
- // within map_value , e.g. `address.city`,
- // or a special field path. The only valid special field is `*`, which
- // represents any field.
- //
- // Field paths may be quoted using ` (backtick). The only character that needs
- // to be escaped within a quoted field path is the backtick character itself,
- // escaped using a backslash. Special characters in field paths that
- // must be quoted include: `*`, `.`,
- // ``` (backtick), `[`, `]`, as well as any ascii symbolic characters.
- //
- // Examples:
- // (Note: Comments here are written in markdown syntax, so there is an
- // additional layer of backticks to represent a code block)
- // `\`address.city\`` represents a field named `address.city`, not the map key
- // `city` in the field `address`.
- // `\`*\`` represents a field named `*`, not any field.
- //
- // A special `Field` contains the default indexing settings for all fields.
- // This field's resource name is:
- // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`
- // Indexes defined on this `Field` will be applied to all fields which do not
- // have their own `Field` index configuration.
- string name = 1;
-
- // The index configuration for this field. If unset, field indexing will
- // revert to the configuration defined by the `ancestor_field`. To
- // explicitly remove all indexes for this field, specify an index config
- // with an empty list of indexes.
- IndexConfig index_config = 2;
-}
diff --git a/google/cloud/firestore_admin_v1/proto/field_pb2.py b/google/cloud/firestore_admin_v1/proto/field_pb2.py
deleted file mode 100644
index 281ac78d87..0000000000
--- a/google/cloud/firestore_admin_v1/proto/field_pb2.py
+++ /dev/null
@@ -1,288 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore/admin_v1/proto/field.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
-from google.cloud.firestore_admin_v1.proto import (
- index_pb2 as google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2,
-)
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore/admin_v1/proto/field.proto",
- package="google.firestore.admin.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\035com.google.firestore.admin.v1B\nFieldProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1"
- ),
- serialized_pb=_b(
- '\n1google/cloud/firestore/admin_v1/proto/field.proto\x12\x19google.firestore.admin.v1\x1a\x19google/api/resource.proto\x1a\x31google/cloud/firestore/admin_v1/proto/index.proto\x1a\x1cgoogle/api/annotations.proto"\xe0\x02\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x42\n\x0cindex_config\x18\x02 \x01(\x0b\x32,.google.firestore.admin.v1.Field.IndexConfig\x1a\x89\x01\n\x0bIndexConfig\x12\x31\n\x07indexes\x18\x01 \x03(\x0b\x32 .google.firestore.admin.v1.Index\x12\x1c\n\x14uses_ancestor_config\x18\x02 \x01(\x08\x12\x16\n\x0e\x61ncestor_field\x18\x03 \x01(\t\x12\x11\n\treverting\x18\x04 \x01(\x08:y\xea\x41v\n\x1e\x66irestore.googleapis.com/Field\x12Tprojects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}B\xb8\x01\n\x1d\x63om.google.firestore.admin.v1B\nFieldProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1b\x06proto3'
- ),
- dependencies=[
- google_dot_api_dot_resource__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2.DESCRIPTOR,
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- ],
-)
-
-
-_FIELD_INDEXCONFIG = _descriptor.Descriptor(
- name="IndexConfig",
- full_name="google.firestore.admin.v1.Field.IndexConfig",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="indexes",
- full_name="google.firestore.admin.v1.Field.IndexConfig.indexes",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="uses_ancestor_config",
- full_name="google.firestore.admin.v1.Field.IndexConfig.uses_ancestor_config",
- index=1,
- number=2,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="ancestor_field",
- full_name="google.firestore.admin.v1.Field.IndexConfig.ancestor_field",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="reverting",
- full_name="google.firestore.admin.v1.Field.IndexConfig.reverting",
- index=3,
- number=4,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=281,
- serialized_end=418,
-)
-
-_FIELD = _descriptor.Descriptor(
- name="Field",
- full_name="google.firestore.admin.v1.Field",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.admin.v1.Field.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="index_config",
- full_name="google.firestore.admin.v1.Field.index_config",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_FIELD_INDEXCONFIG],
- enum_types=[],
- serialized_options=_b(
- "\352Av\n\036firestore.googleapis.com/Field\022Tprojects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}"
- ),
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=189,
- serialized_end=541,
-)
-
-_FIELD_INDEXCONFIG.fields_by_name[
- "indexes"
-].message_type = (
- google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2._INDEX
-)
-_FIELD_INDEXCONFIG.containing_type = _FIELD
-_FIELD.fields_by_name["index_config"].message_type = _FIELD_INDEXCONFIG
-DESCRIPTOR.message_types_by_name["Field"] = _FIELD
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-Field = _reflection.GeneratedProtocolMessageType(
- "Field",
- (_message.Message,),
- dict(
- IndexConfig=_reflection.GeneratedProtocolMessageType(
- "IndexConfig",
- (_message.Message,),
- dict(
- DESCRIPTOR=_FIELD_INDEXCONFIG,
- __module__="google.cloud.firestore.admin_v1.proto.field_pb2",
- __doc__="""The index configuration for this field.
-
-
- Attributes:
- indexes:
- The indexes supported for this field.
- uses_ancestor_config:
- Output only. When true, the ``Field``'s index configuration is
- set from the configuration specified by the
- ``ancestor_field``. When false, the ``Field``'s index
- configuration is defined explicitly.
- ancestor_field:
- Output only. Specifies the resource name of the ``Field`` from
- which this field's index configuration is set (when
- ``uses_ancestor_config`` is true), or from which it *would* be
- set if this field had no index configuration (when
- ``uses_ancestor_config`` is false).
- reverting:
- Output only When true, the ``Field``'s index configuration is
- in the process of being reverted. Once complete, the index
- config will transition to the same state as the field
- specified by ``ancestor_field``, at which point
- ``uses_ancestor_config`` will be ``true`` and ``reverting``
- will be ``false``.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Field.IndexConfig)
- ),
- ),
- DESCRIPTOR=_FIELD,
- __module__="google.cloud.firestore.admin_v1.proto.field_pb2",
- __doc__="""Represents a single field in the database.
-
- Fields are grouped by their "Collection Group", which represent all
- collections in the database with the same id.
-
-
- Attributes:
- name:
- A field name of the form ``projects/{project_id}/databases/{da
- tabase_id}/collectionGroups/{collection_id}/fields/{field_path
- }`` A field path may be a simple field name, e.g. ``address``
- or a path to fields within map\_value , e.g. ``address.city``,
- or a special field path. The only valid special field is
- ``*``, which represents any field. Field paths may be quoted
- using ``(backtick). The only character that needs to be
- escaped within a quoted field path is the backtick character
- itself, escaped using a backslash. Special characters in field
- paths that must be quoted include:``\ \*\ ``,``.\ ``, ```
- (backtick),``\ [``,``]\`, as well as any ascii symbolic
- characters. Examples: (Note: Comments here are written in
- markdown syntax, so there is an additional layer of backticks
- to represent a code block) ``\``\ address.city\`\ ``represents
- a field named``\ address.city\ ``, not the map key``\ city\
- ``in the field``\ address\ ``.``\ \`\ *\`\ ``represents a
- field named``*\ \`, not any field. A special ``Field``
- contains the default indexing settings for all fields. This
- field's resource name is: ``projects/{project_id}/databases/{d
- atabase_id}/collectionGroups/__default__/fields/*`` Indexes
- defined on this ``Field`` will be applied to all fields which
- do not have their own ``Field`` index configuration.
- index_config:
- The index configuration for this field. If unset, field
- indexing will revert to the configuration defined by the
- ``ancestor_field``. To explicitly remove all indexes for this
- field, specify an index config with an empty list of indexes.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Field)
- ),
-)
-_sym_db.RegisterMessage(Field)
-_sym_db.RegisterMessage(Field.IndexConfig)
-
-
-DESCRIPTOR._options = None
-_FIELD._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_admin_v1/proto/field_pb2_grpc.py b/google/cloud/firestore_admin_v1/proto/field_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_admin_v1/proto/field_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_admin_v1/proto/firestore_admin.proto b/google/cloud/firestore_admin_v1/proto/firestore_admin.proto
deleted file mode 100644
index 75dd2d3113..0000000000
--- a/google/cloud/firestore_admin_v1/proto/firestore_admin.proto
+++ /dev/null
@@ -1,354 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.admin.v1;
-
-import "google/api/annotations.proto";
-import "google/api/client.proto";
-import "google/api/field_behavior.proto";
-import "google/api/resource.proto";
-import "google/firestore/admin/v1/field.proto";
-import "google/firestore/admin/v1/index.proto";
-import "google/longrunning/operations.proto";
-import "google/protobuf/empty.proto";
-import "google/protobuf/field_mask.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
-option java_multiple_files = true;
-option java_outer_classname = "FirestoreAdminProto";
-option java_package = "com.google.firestore.admin.v1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
-option (google.api.resource_definition) = {
- type: "firestore.googleapis.com/Database"
- pattern: "projects/{project}/databases/{database}"
-};
-option (google.api.resource_definition) = {
- type: "firestore.googleapis.com/CollectionGroup"
- pattern: "projects/{project}/databases/{database}/collectionGroups/{collection}"
-};
-
-// Operations are created by service `FirestoreAdmin`, but are accessed via
-// service `google.longrunning.Operations`.
-service FirestoreAdmin {
- option (google.api.default_host) = "firestore.googleapis.com";
- option (google.api.oauth_scopes) =
- "https://www.googleapis.com/auth/cloud-platform,"
- "https://www.googleapis.com/auth/datastore";
-
- // Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation]
- // which may be used to track the status of the creation. The metadata for
- // the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
- rpc CreateIndex(CreateIndexRequest) returns (google.longrunning.Operation) {
- option (google.api.http) = {
- post: "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes"
- body: "index"
- };
- option (google.api.method_signature) = "parent,index";
- option (google.longrunning.operation_info) = {
- response_type: "Index"
- metadata_type: "IndexOperationMetadata"
- };
- }
-
- // Lists composite indexes.
- rpc ListIndexes(ListIndexesRequest) returns (ListIndexesResponse) {
- option (google.api.http) = {
- get: "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes"
- };
- option (google.api.method_signature) = "parent";
- }
-
- // Gets a composite index.
- rpc GetIndex(GetIndexRequest) returns (Index) {
- option (google.api.http) = {
- get: "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}"
- };
- option (google.api.method_signature) = "name";
- }
-
- // Deletes a composite index.
- rpc DeleteIndex(DeleteIndexRequest) returns (google.protobuf.Empty) {
- option (google.api.http) = {
- delete: "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}"
- };
- option (google.api.method_signature) = "name";
- }
-
- // Gets the metadata and configuration for a Field.
- rpc GetField(GetFieldRequest) returns (Field) {
- option (google.api.http) = {
- get: "/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}"
- };
- option (google.api.method_signature) = "name";
- }
-
- // Updates a field configuration. Currently, field updates apply only to
- // single field index configuration. However, calls to
- // [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] should provide a field mask to avoid
- // changing any configuration that the caller isn't aware of. The field mask
- // should be specified as: `{ paths: "index_config" }`.
- //
- // This call returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to
- // track the status of the field update. The metadata for
- // the operation will be the type [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata].
- //
- // To configure the default field settings for the database, use
- // the special `Field` with resource name:
- // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`.
- rpc UpdateField(UpdateFieldRequest) returns (google.longrunning.Operation) {
- option (google.api.http) = {
- patch: "/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}"
- body: "field"
- };
- option (google.api.method_signature) = "field";
- option (google.longrunning.operation_info) = {
- response_type: "Field"
- metadata_type: "FieldOperationMetadata"
- };
- }
-
- // Lists the field configuration and metadata for this database.
- //
- // Currently, [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields
- // that have been explicitly overridden. To issue this query, call
- // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to
- // `indexConfig.usesAncestorConfig:false`.
- rpc ListFields(ListFieldsRequest) returns (ListFieldsResponse) {
- option (google.api.http) = {
- get: "/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields"
- };
- option (google.api.method_signature) = "parent";
- }
-
- // Exports a copy of all or a subset of documents from Google Cloud Firestore
- // to another storage system, such as Google Cloud Storage. Recent updates to
- // documents may not be reflected in the export. The export occurs in the
- // background and its progress can be monitored and managed via the
- // Operation resource that is created. The output of an export may only be
- // used once the associated operation is done. If an export operation is
- // cancelled before completion it may leave partial data behind in Google
- // Cloud Storage.
- rpc ExportDocuments(ExportDocumentsRequest) returns (google.longrunning.Operation) {
- option (google.api.http) = {
- post: "/v1/{name=projects/*/databases/*}:exportDocuments"
- body: "*"
- };
- option (google.api.method_signature) = "name";
- option (google.longrunning.operation_info) = {
- response_type: "ExportDocumentsResponse"
- metadata_type: "ExportDocumentsMetadata"
- };
- }
-
- // Imports documents into Google Cloud Firestore. Existing documents with the
- // same name are overwritten. The import occurs in the background and its
- // progress can be monitored and managed via the Operation resource that is
- // created. If an ImportDocuments operation is cancelled, it is possible
- // that a subset of the data has already been imported to Cloud Firestore.
- rpc ImportDocuments(ImportDocumentsRequest) returns (google.longrunning.Operation) {
- option (google.api.http) = {
- post: "/v1/{name=projects/*/databases/*}:importDocuments"
- body: "*"
- };
- option (google.api.method_signature) = "name";
- option (google.longrunning.operation_info) = {
- response_type: "google.protobuf.Empty"
- metadata_type: "ImportDocumentsMetadata"
- };
- }
-}
-
-// The request for [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
-message CreateIndexRequest {
- // Required. A parent name of the form
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "firestore.googleapis.com/CollectionGroup"
- }
- ];
-
- // Required. The composite index to create.
- Index index = 2 [(google.api.field_behavior) = REQUIRED];
-}
-
-// The request for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
-message ListIndexesRequest {
- // Required. A parent name of the form
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "firestore.googleapis.com/CollectionGroup"
- }
- ];
-
- // The filter to apply to list results.
- string filter = 2;
-
- // The number of results to return.
- int32 page_size = 3;
-
- // A page token, returned from a previous call to
- // [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes], that may be used to get the next
- // page of results.
- string page_token = 4;
-}
-
-// The response for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
-message ListIndexesResponse {
- // The requested indexes.
- repeated Index indexes = 1;
-
- // A page token that may be used to request another page of results. If blank,
- // this is the last page.
- string next_page_token = 2;
-}
-
-// The request for [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex].
-message GetIndexRequest {
- // Required. A name of the form
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "firestore.googleapis.com/Index"
- }
- ];
-}
-
-// The request for [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex].
-message DeleteIndexRequest {
- // Required. A name of the form
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "firestore.googleapis.com/Index"
- }
- ];
-}
-
-// The request for [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
-message UpdateFieldRequest {
- // Required. The field to be updated.
- Field field = 1 [(google.api.field_behavior) = REQUIRED];
-
- // A mask, relative to the field. If specified, only configuration specified
- // by this field_mask will be updated in the field.
- google.protobuf.FieldMask update_mask = 2;
-}
-
-// The request for [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField].
-message GetFieldRequest {
- // Required. A name of the form
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "firestore.googleapis.com/Field"
- }
- ];
-}
-
-// The request for [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
-message ListFieldsRequest {
- // Required. A parent name of the form
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "firestore.googleapis.com/CollectionGroup"
- }
- ];
-
- // The filter to apply to list results. Currently,
- // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields
- // that have been explicitly overridden. To issue this query, call
- // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to
- // `indexConfig.usesAncestorConfig:false`.
- string filter = 2;
-
- // The number of results to return.
- int32 page_size = 3;
-
- // A page token, returned from a previous call to
- // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields], that may be used to get the next
- // page of results.
- string page_token = 4;
-}
-
-// The response for [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
-message ListFieldsResponse {
- // The requested fields.
- repeated Field fields = 1;
-
- // A page token that may be used to request another page of results. If blank,
- // this is the last page.
- string next_page_token = 2;
-}
-
-// The request for [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
-message ExportDocumentsRequest {
- // Required. Database to export. Should be of the form:
- // `projects/{project_id}/databases/{database_id}`.
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "firestore.googleapis.com/Database"
- }
- ];
-
- // Which collection ids to export. Unspecified means all collections.
- repeated string collection_ids = 2;
-
- // The output URI. Currently only supports Google Cloud Storage URIs of the
- // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the name
- // of the Google Cloud Storage bucket and `NAMESPACE_PATH` is an optional
- // Google Cloud Storage namespace path. When
- // choosing a name, be sure to consider Google Cloud Storage naming
- // guidelines: https://cloud.google.com/storage/docs/naming.
- // If the URI is a bucket (without a namespace path), a prefix will be
- // generated based on the start time.
- string output_uri_prefix = 3;
-}
-
-// The request for [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
-message ImportDocumentsRequest {
- // Required. Database to import into. Should be of the form:
- // `projects/{project_id}/databases/{database_id}`.
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "firestore.googleapis.com/Database"
- }
- ];
-
- // Which collection ids to import. Unspecified means all collections included
- // in the import.
- repeated string collection_ids = 2;
-
- // Location of the exported files.
- // This must match the output_uri_prefix of an ExportDocumentsResponse from
- // an export that has completed successfully.
- // See:
- // [google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix].
- string input_uri_prefix = 3;
-}
diff --git a/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py b/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py
deleted file mode 100644
index 0737cfd86e..0000000000
--- a/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py
+++ /dev/null
@@ -1,1196 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore/admin_v1/proto/firestore_admin.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-from google.api import client_pb2 as google_dot_api_dot_client__pb2
-from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
-from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
-from google.cloud.firestore_admin_v1.proto import (
- field_pb2 as google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_field__pb2,
-)
-from google.cloud.firestore_admin_v1.proto import (
- index_pb2 as google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2,
-)
-from google.longrunning import (
- operations_pb2 as google_dot_longrunning_dot_operations__pb2,
-)
-from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
-from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore/admin_v1/proto/firestore_admin.proto",
- package="google.firestore.admin.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\035com.google.firestore.admin.v1B\023FirestoreAdminProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1\352AL\n!firestore.googleapis.com/Database\022'projects/{project}/databases/{database}\352Aq\n(firestore.googleapis.com/CollectionGroup\022Eprojects/{project}/databases/{database}/collectionGroups/{collection}"
- ),
- serialized_pb=_b(
- '\n;google/cloud/firestore/admin_v1/proto/firestore_admin.proto\x12\x19google.firestore.admin.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x31google/cloud/firestore/admin_v1/proto/field.proto\x1a\x31google/cloud/firestore/admin_v1/proto/index.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x8c\x01\n\x12\x43reateIndexRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(firestore.googleapis.com/CollectionGroup\x12\x34\n\x05index\x18\x02 \x01(\x0b\x32 .google.firestore.admin.v1.IndexB\x03\xe0\x41\x02"\x8d\x01\n\x12ListIndexesRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(firestore.googleapis.com/CollectionGroup\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"a\n\x13ListIndexesResponse\x12\x31\n\x07indexes\x18\x01 \x03(\x0b\x32 .google.firestore.admin.v1.Index\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"G\n\x0fGetIndexRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x66irestore.googleapis.com/Index"J\n\x12\x44\x65leteIndexRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x66irestore.googleapis.com/Index"{\n\x12UpdateFieldRequest\x12\x34\n\x05\x66ield\x18\x01 \x01(\x0b\x32 .google.firestore.admin.v1.FieldB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"G\n\x0fGetFieldRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x66irestore.googleapis.com/Field"\x8c\x01\n\x11ListFieldsRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(firestore.googleapis.com/CollectionGroup\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"_\n\x12ListFieldsResponse\x12\x30\n\x06\x66ields\x18\x01 \x03(\x0b\x32 
.google.firestore.admin.v1.Field\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x16\x45xportDocumentsRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!firestore.googleapis.com/Database\x12\x16\n\x0e\x63ollection_ids\x18\x02 \x03(\t\x12\x19\n\x11output_uri_prefix\x18\x03 \x01(\t"\x83\x01\n\x16ImportDocumentsRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!firestore.googleapis.com/Database\x12\x16\n\x0e\x63ollection_ids\x18\x02 \x03(\t\x12\x18\n\x10input_uri_prefix\x18\x03 \x01(\t2\xf5\x0e\n\x0e\x46irestoreAdmin\x12\xdb\x01\n\x0b\x43reateIndex\x12-.google.firestore.admin.v1.CreateIndexRequest\x1a\x1d.google.longrunning.Operation"~\x82\xd3\xe4\x93\x02G">/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes:\x05index\xda\x41\x0cparent,index\xca\x41\x1f\n\x05Index\x12\x16IndexOperationMetadata\x12\xbd\x01\n\x0bListIndexes\x12-.google.firestore.admin.v1.ListIndexesRequest\x1a..google.firestore.admin.v1.ListIndexesResponse"O\x82\xd3\xe4\x93\x02@\x12>/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes\xda\x41\x06parent\x12\xa7\x01\n\x08GetIndex\x12*.google.firestore.admin.v1.GetIndexRequest\x1a .google.firestore.admin.v1.Index"M\x82\xd3\xe4\x93\x02@\x12>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\xda\x41\x04name\x12\xa3\x01\n\x0b\x44\x65leteIndex\x12-.google.firestore.admin.v1.DeleteIndexRequest\x1a\x16.google.protobuf.Empty"M\x82\xd3\xe4\x93\x02@*>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\xda\x41\x04name\x12\xa6\x01\n\x08GetField\x12*.google.firestore.admin.v1.GetFieldRequest\x1a 
.google.firestore.admin.v1.Field"L\x82\xd3\xe4\x93\x02?\x12=/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}\xda\x41\x04name\x12\xd9\x01\n\x0bUpdateField\x12-.google.firestore.admin.v1.UpdateFieldRequest\x1a\x1d.google.longrunning.Operation"|\x82\xd3\xe4\x93\x02L2C/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}:\x05\x66ield\xda\x41\x05\x66ield\xca\x41\x1f\n\x05\x46ield\x12\x16\x46ieldOperationMetadata\x12\xb9\x01\n\nListFields\x12,.google.firestore.admin.v1.ListFieldsRequest\x1a-.google.firestore.admin.v1.ListFieldsResponse"N\x82\xd3\xe4\x93\x02?\x12=/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields\xda\x41\x06parent\x12\xdd\x01\n\x0f\x45xportDocuments\x12\x31.google.firestore.admin.v1.ExportDocumentsRequest\x1a\x1d.google.longrunning.Operation"x\x82\xd3\xe4\x93\x02\x36"1/v1/{name=projects/*/databases/*}:exportDocuments:\x01*\xda\x41\x04name\xca\x41\x32\n\x17\x45xportDocumentsResponse\x12\x17\x45xportDocumentsMetadata\x12\xdb\x01\n\x0fImportDocuments\x12\x31.google.firestore.admin.v1.ImportDocumentsRequest\x1a\x1d.google.longrunning.Operation"v\x82\xd3\xe4\x93\x02\x36"1/v1/{name=projects/*/databases/*}:importDocuments:\x01*\xda\x41\x04name\xca\x41\x30\n\x15google.protobuf.Empty\x12\x17ImportDocumentsMetadata\x1av\xca\x41\x18\x66irestore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\x84\x03\n\x1d\x63om.google.firestore.admin.v1B\x13\x46irestoreAdminProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1\xea\x41L\n!firestore.googleapis.com/Database\x12\'projects/{project}/databases/{database}\xea\x41q\n(firestore.googleapis.com/CollectionGroup\x12\x45projects/{project}/databases/{database}/collectionGroups/{collection}b\x06proto3'
- ),
- dependencies=[
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- google_dot_api_dot_client__pb2.DESCRIPTOR,
- google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
- google_dot_api_dot_resource__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_field__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2.DESCRIPTOR,
- google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,
- ],
-)
-
-
-_CREATEINDEXREQUEST = _descriptor.Descriptor(
- name="CreateIndexRequest",
- full_name="google.firestore.admin.v1.CreateIndexRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="parent",
- full_name="google.firestore.admin.v1.CreateIndexRequest.parent",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b(
- "\340A\002\372A*\n(firestore.googleapis.com/CollectionGroup"
- ),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="index",
- full_name="google.firestore.admin.v1.CreateIndexRequest.index",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=408,
- serialized_end=548,
-)
-
-
-_LISTINDEXESREQUEST = _descriptor.Descriptor(
- name="ListIndexesRequest",
- full_name="google.firestore.admin.v1.ListIndexesRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="parent",
- full_name="google.firestore.admin.v1.ListIndexesRequest.parent",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b(
- "\340A\002\372A*\n(firestore.googleapis.com/CollectionGroup"
- ),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="filter",
- full_name="google.firestore.admin.v1.ListIndexesRequest.filter",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="page_size",
- full_name="google.firestore.admin.v1.ListIndexesRequest.page_size",
- index=2,
- number=3,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="page_token",
- full_name="google.firestore.admin.v1.ListIndexesRequest.page_token",
- index=3,
- number=4,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=551,
- serialized_end=692,
-)
-
-
-_LISTINDEXESRESPONSE = _descriptor.Descriptor(
- name="ListIndexesResponse",
- full_name="google.firestore.admin.v1.ListIndexesResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="indexes",
- full_name="google.firestore.admin.v1.ListIndexesResponse.indexes",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="next_page_token",
- full_name="google.firestore.admin.v1.ListIndexesResponse.next_page_token",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=694,
- serialized_end=791,
-)
-
-
-_GETINDEXREQUEST = _descriptor.Descriptor(
- name="GetIndexRequest",
- full_name="google.firestore.admin.v1.GetIndexRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.admin.v1.GetIndexRequest.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b(
- "\340A\002\372A \n\036firestore.googleapis.com/Index"
- ),
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=793,
- serialized_end=864,
-)
-
-
-_DELETEINDEXREQUEST = _descriptor.Descriptor(
- name="DeleteIndexRequest",
- full_name="google.firestore.admin.v1.DeleteIndexRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.admin.v1.DeleteIndexRequest.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b(
- "\340A\002\372A \n\036firestore.googleapis.com/Index"
- ),
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=866,
- serialized_end=940,
-)
-
-
-_UPDATEFIELDREQUEST = _descriptor.Descriptor(
- name="UpdateFieldRequest",
- full_name="google.firestore.admin.v1.UpdateFieldRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field",
- full_name="google.firestore.admin.v1.UpdateFieldRequest.field",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update_mask",
- full_name="google.firestore.admin.v1.UpdateFieldRequest.update_mask",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=942,
- serialized_end=1065,
-)
-
-
-_GETFIELDREQUEST = _descriptor.Descriptor(
- name="GetFieldRequest",
- full_name="google.firestore.admin.v1.GetFieldRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.admin.v1.GetFieldRequest.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b(
- "\340A\002\372A \n\036firestore.googleapis.com/Field"
- ),
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1067,
- serialized_end=1138,
-)
-
-
-_LISTFIELDSREQUEST = _descriptor.Descriptor(
- name="ListFieldsRequest",
- full_name="google.firestore.admin.v1.ListFieldsRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="parent",
- full_name="google.firestore.admin.v1.ListFieldsRequest.parent",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b(
- "\340A\002\372A*\n(firestore.googleapis.com/CollectionGroup"
- ),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="filter",
- full_name="google.firestore.admin.v1.ListFieldsRequest.filter",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="page_size",
- full_name="google.firestore.admin.v1.ListFieldsRequest.page_size",
- index=2,
- number=3,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="page_token",
- full_name="google.firestore.admin.v1.ListFieldsRequest.page_token",
- index=3,
- number=4,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1141,
- serialized_end=1281,
-)
-
-
-_LISTFIELDSRESPONSE = _descriptor.Descriptor(
- name="ListFieldsResponse",
- full_name="google.firestore.admin.v1.ListFieldsResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="google.firestore.admin.v1.ListFieldsResponse.fields",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="next_page_token",
- full_name="google.firestore.admin.v1.ListFieldsResponse.next_page_token",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1283,
- serialized_end=1378,
-)
-
-
-_EXPORTDOCUMENTSREQUEST = _descriptor.Descriptor(
- name="ExportDocumentsRequest",
- full_name="google.firestore.admin.v1.ExportDocumentsRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.admin.v1.ExportDocumentsRequest.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b(
- "\340A\002\372A#\n!firestore.googleapis.com/Database"
- ),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="collection_ids",
- full_name="google.firestore.admin.v1.ExportDocumentsRequest.collection_ids",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="output_uri_prefix",
- full_name="google.firestore.admin.v1.ExportDocumentsRequest.output_uri_prefix",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1381,
- serialized_end=1513,
-)
-
-
-_IMPORTDOCUMENTSREQUEST = _descriptor.Descriptor(
- name="ImportDocumentsRequest",
- full_name="google.firestore.admin.v1.ImportDocumentsRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.admin.v1.ImportDocumentsRequest.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b(
- "\340A\002\372A#\n!firestore.googleapis.com/Database"
- ),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="collection_ids",
- full_name="google.firestore.admin.v1.ImportDocumentsRequest.collection_ids",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="input_uri_prefix",
- full_name="google.firestore.admin.v1.ImportDocumentsRequest.input_uri_prefix",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1516,
- serialized_end=1647,
-)
-
-_CREATEINDEXREQUEST.fields_by_name[
- "index"
-].message_type = (
- google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2._INDEX
-)
-_LISTINDEXESRESPONSE.fields_by_name[
- "indexes"
-].message_type = (
- google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2._INDEX
-)
-_UPDATEFIELDREQUEST.fields_by_name[
- "field"
-].message_type = (
- google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_field__pb2._FIELD
-)
-_UPDATEFIELDREQUEST.fields_by_name[
- "update_mask"
-].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK
-_LISTFIELDSRESPONSE.fields_by_name[
- "fields"
-].message_type = (
- google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_field__pb2._FIELD
-)
-DESCRIPTOR.message_types_by_name["CreateIndexRequest"] = _CREATEINDEXREQUEST
-DESCRIPTOR.message_types_by_name["ListIndexesRequest"] = _LISTINDEXESREQUEST
-DESCRIPTOR.message_types_by_name["ListIndexesResponse"] = _LISTINDEXESRESPONSE
-DESCRIPTOR.message_types_by_name["GetIndexRequest"] = _GETINDEXREQUEST
-DESCRIPTOR.message_types_by_name["DeleteIndexRequest"] = _DELETEINDEXREQUEST
-DESCRIPTOR.message_types_by_name["UpdateFieldRequest"] = _UPDATEFIELDREQUEST
-DESCRIPTOR.message_types_by_name["GetFieldRequest"] = _GETFIELDREQUEST
-DESCRIPTOR.message_types_by_name["ListFieldsRequest"] = _LISTFIELDSREQUEST
-DESCRIPTOR.message_types_by_name["ListFieldsResponse"] = _LISTFIELDSRESPONSE
-DESCRIPTOR.message_types_by_name["ExportDocumentsRequest"] = _EXPORTDOCUMENTSREQUEST
-DESCRIPTOR.message_types_by_name["ImportDocumentsRequest"] = _IMPORTDOCUMENTSREQUEST
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-CreateIndexRequest = _reflection.GeneratedProtocolMessageType(
- "CreateIndexRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CREATEINDEXREQUEST,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
-
-
- Attributes:
- parent:
- Required. A parent name of the form ``projects/{project_id}/da
- tabases/{database_id}/collectionGroups/{collection_id}``
- index:
- Required. The composite index to create.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.CreateIndexRequest)
- ),
-)
-_sym_db.RegisterMessage(CreateIndexRequest)
-
-ListIndexesRequest = _reflection.GeneratedProtocolMessageType(
- "ListIndexesRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTINDEXESREQUEST,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
-
-
- Attributes:
- parent:
- Required. A parent name of the form ``projects/{project_id}/da
- tabases/{database_id}/collectionGroups/{collection_id}``
- filter:
- The filter to apply to list results.
- page_size:
- The number of results to return.
- page_token:
- A page token, returned from a previous call to [FirestoreAdmin
- .ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListInd
- exes], that may be used to get the next page of results.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ListIndexesRequest)
- ),
-)
-_sym_db.RegisterMessage(ListIndexesRequest)
-
-ListIndexesResponse = _reflection.GeneratedProtocolMessageType(
- "ListIndexesResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTINDEXESRESPONSE,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The response for
- [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
-
-
- Attributes:
- indexes:
- The requested indexes.
- next_page_token:
- A page token that may be used to request another page of
- results. If blank, this is the last page.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ListIndexesResponse)
- ),
-)
-_sym_db.RegisterMessage(ListIndexesResponse)
-
-GetIndexRequest = _reflection.GeneratedProtocolMessageType(
- "GetIndexRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GETINDEXREQUEST,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex].
-
-
- Attributes:
- name:
- Required. A name of the form ``projects/{project_id}/databases
- /{database_id}/collectionGroups/{collection_id}/indexes/{index
- _id}``
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.GetIndexRequest)
- ),
-)
-_sym_db.RegisterMessage(GetIndexRequest)
-
-DeleteIndexRequest = _reflection.GeneratedProtocolMessageType(
- "DeleteIndexRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DELETEINDEXREQUEST,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex].
-
-
- Attributes:
- name:
- Required. A name of the form ``projects/{project_id}/databases
- /{database_id}/collectionGroups/{collection_id}/indexes/{index
- _id}``
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.DeleteIndexRequest)
- ),
-)
-_sym_db.RegisterMessage(DeleteIndexRequest)
-
-UpdateFieldRequest = _reflection.GeneratedProtocolMessageType(
- "UpdateFieldRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPDATEFIELDREQUEST,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
-
-
- Attributes:
- field:
- Required. The field to be updated.
- update_mask:
- A mask, relative to the field. If specified, only
- configuration specified by this field\_mask will be updated in
- the field.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.UpdateFieldRequest)
- ),
-)
-_sym_db.RegisterMessage(UpdateFieldRequest)
-
-GetFieldRequest = _reflection.GeneratedProtocolMessageType(
- "GetFieldRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GETFIELDREQUEST,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField].
-
-
- Attributes:
- name:
- Required. A name of the form ``projects/{project_id}/databases
- /{database_id}/collectionGroups/{collection_id}/fields/{field_
- id}``
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.GetFieldRequest)
- ),
-)
-_sym_db.RegisterMessage(GetFieldRequest)
-
-ListFieldsRequest = _reflection.GeneratedProtocolMessageType(
- "ListFieldsRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTFIELDSREQUEST,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
-
-
- Attributes:
- parent:
- Required. A parent name of the form ``projects/{project_id}/da
- tabases/{database_id}/collectionGroups/{collection_id}``
- filter:
- The filter to apply to list results. Currently, [FirestoreAdmi
- n.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFie
- lds] only supports listing fields that have been explicitly
- overridden. To issue this query, call [FirestoreAdmin.ListFiel
- ds][google.firestore.admin.v1.FirestoreAdmin.ListFields] with
- the filter set to ``indexConfig.usesAncestorConfig:false``.
- page_size:
- The number of results to return.
- page_token:
- A page token, returned from a previous call to [FirestoreAdmin
- .ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFiel
- ds], that may be used to get the next page of results.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ListFieldsRequest)
- ),
-)
-_sym_db.RegisterMessage(ListFieldsRequest)
-
-ListFieldsResponse = _reflection.GeneratedProtocolMessageType(
- "ListFieldsResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTFIELDSRESPONSE,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The response for
- [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
-
-
- Attributes:
- fields:
- The requested fields.
- next_page_token:
- A page token that may be used to request another page of
- results. If blank, this is the last page.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ListFieldsResponse)
- ),
-)
-_sym_db.RegisterMessage(ListFieldsResponse)
-
-ExportDocumentsRequest = _reflection.GeneratedProtocolMessageType(
- "ExportDocumentsRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_EXPORTDOCUMENTSREQUEST,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
-
-
- Attributes:
- name:
- Required. Database to export. Should be of the form:
- ``projects/{project_id}/databases/{database_id}``.
- collection_ids:
- Which collection ids to export. Unspecified means all
- collections.
- output_uri_prefix:
- The output URI. Currently only supports Google Cloud Storage
- URIs of the form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where
- ``BUCKET_NAME`` is the name of the Google Cloud Storage bucket
- and ``NAMESPACE_PATH`` is an optional Google Cloud Storage
- namespace path. When choosing a name, be sure to consider
- Google Cloud Storage naming guidelines:
- https://cloud.google.com/storage/docs/naming. If the URI is a
- bucket (without a namespace path), a prefix will be generated
- based on the start time.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ExportDocumentsRequest)
- ),
-)
-_sym_db.RegisterMessage(ExportDocumentsRequest)
-
-ImportDocumentsRequest = _reflection.GeneratedProtocolMessageType(
- "ImportDocumentsRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_IMPORTDOCUMENTSREQUEST,
- __module__="google.cloud.firestore.admin_v1.proto.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
-
-
- Attributes:
- name:
- Required. Database to import into. Should be of the form:
- ``projects/{project_id}/databases/{database_id}``.
- collection_ids:
- Which collection ids to import. Unspecified means all
- collections included in the import.
- input_uri_prefix:
- Location of the exported files. This must match the
- output\_uri\_prefix of an ExportDocumentsResponse from an
- export that has completed successfully. See: [google.firestore
- .admin.v1.ExportDocumentsResponse.output\_uri\_prefix][google.
- firestore.admin.v1.ExportDocumentsResponse.output\_uri\_prefix
- ].
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ImportDocumentsRequest)
- ),
-)
-_sym_db.RegisterMessage(ImportDocumentsRequest)
-
-
-DESCRIPTOR._options = None
-_CREATEINDEXREQUEST.fields_by_name["parent"]._options = None
-_CREATEINDEXREQUEST.fields_by_name["index"]._options = None
-_LISTINDEXESREQUEST.fields_by_name["parent"]._options = None
-_GETINDEXREQUEST.fields_by_name["name"]._options = None
-_DELETEINDEXREQUEST.fields_by_name["name"]._options = None
-_UPDATEFIELDREQUEST.fields_by_name["field"]._options = None
-_GETFIELDREQUEST.fields_by_name["name"]._options = None
-_LISTFIELDSREQUEST.fields_by_name["parent"]._options = None
-_EXPORTDOCUMENTSREQUEST.fields_by_name["name"]._options = None
-_IMPORTDOCUMENTSREQUEST.fields_by_name["name"]._options = None
-
-_FIRESTOREADMIN = _descriptor.ServiceDescriptor(
- name="FirestoreAdmin",
- full_name="google.firestore.admin.v1.FirestoreAdmin",
- file=DESCRIPTOR,
- index=0,
- serialized_options=_b(
- "\312A\030firestore.googleapis.com\322AXhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastore"
- ),
- serialized_start=1650,
- serialized_end=3559,
- methods=[
- _descriptor.MethodDescriptor(
- name="CreateIndex",
- full_name="google.firestore.admin.v1.FirestoreAdmin.CreateIndex",
- index=0,
- containing_service=None,
- input_type=_CREATEINDEXREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- '\202\323\344\223\002G">/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes:\005index\332A\014parent,index\312A\037\n\005Index\022\026IndexOperationMetadata'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ListIndexes",
- full_name="google.firestore.admin.v1.FirestoreAdmin.ListIndexes",
- index=1,
- containing_service=None,
- input_type=_LISTINDEXESREQUEST,
- output_type=_LISTINDEXESRESPONSE,
- serialized_options=_b(
- "\202\323\344\223\002@\022>/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes\332A\006parent"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="GetIndex",
- full_name="google.firestore.admin.v1.FirestoreAdmin.GetIndex",
- index=2,
- containing_service=None,
- input_type=_GETINDEXREQUEST,
- output_type=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2._INDEX,
- serialized_options=_b(
- "\202\323\344\223\002@\022>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\332A\004name"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="DeleteIndex",
- full_name="google.firestore.admin.v1.FirestoreAdmin.DeleteIndex",
- index=3,
- containing_service=None,
- input_type=_DELETEINDEXREQUEST,
- output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- serialized_options=_b(
- "\202\323\344\223\002@*>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\332A\004name"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="GetField",
- full_name="google.firestore.admin.v1.FirestoreAdmin.GetField",
- index=4,
- containing_service=None,
- input_type=_GETFIELDREQUEST,
- output_type=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_field__pb2._FIELD,
- serialized_options=_b(
- "\202\323\344\223\002?\022=/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}\332A\004name"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="UpdateField",
- full_name="google.firestore.admin.v1.FirestoreAdmin.UpdateField",
- index=5,
- containing_service=None,
- input_type=_UPDATEFIELDREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- "\202\323\344\223\002L2C/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}:\005field\332A\005field\312A\037\n\005Field\022\026FieldOperationMetadata"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ListFields",
- full_name="google.firestore.admin.v1.FirestoreAdmin.ListFields",
- index=6,
- containing_service=None,
- input_type=_LISTFIELDSREQUEST,
- output_type=_LISTFIELDSRESPONSE,
- serialized_options=_b(
- "\202\323\344\223\002?\022=/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields\332A\006parent"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ExportDocuments",
- full_name="google.firestore.admin.v1.FirestoreAdmin.ExportDocuments",
- index=7,
- containing_service=None,
- input_type=_EXPORTDOCUMENTSREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- '\202\323\344\223\0026"1/v1/{name=projects/*/databases/*}:exportDocuments:\001*\332A\004name\312A2\n\027ExportDocumentsResponse\022\027ExportDocumentsMetadata'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ImportDocuments",
- full_name="google.firestore.admin.v1.FirestoreAdmin.ImportDocuments",
- index=8,
- containing_service=None,
- input_type=_IMPORTDOCUMENTSREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- '\202\323\344\223\0026"1/v1/{name=projects/*/databases/*}:importDocuments:\001*\332A\004name\312A0\n\025google.protobuf.Empty\022\027ImportDocumentsMetadata'
- ),
- ),
- ],
-)
-_sym_db.RegisterServiceDescriptor(_FIRESTOREADMIN)
-
-DESCRIPTOR.services_by_name["FirestoreAdmin"] = _FIRESTOREADMIN
-
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2_grpc.py b/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2_grpc.py
deleted file mode 100644
index 269e920b3a..0000000000
--- a/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2_grpc.py
+++ /dev/null
@@ -1,227 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
-from google.cloud.firestore_admin_v1.proto import (
- field_pb2 as google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_field__pb2,
-)
-from google.cloud.firestore_admin_v1.proto import (
- firestore_admin_pb2 as google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2,
-)
-from google.cloud.firestore_admin_v1.proto import (
- index_pb2 as google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2,
-)
-from google.longrunning import (
- operations_pb2 as google_dot_longrunning_dot_operations__pb2,
-)
-from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
-
-
-class FirestoreAdminStub(object):
- """Operations are created by service `FirestoreAdmin`, but are accessed via
- service `google.longrunning.Operations`.
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.CreateIndex = channel.unary_unary(
- "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex",
- request_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.CreateIndexRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.ListIndexes = channel.unary_unary(
- "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes",
- request_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesResponse.FromString,
- )
- self.GetIndex = channel.unary_unary(
- "/google.firestore.admin.v1.FirestoreAdmin/GetIndex",
- request_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.GetIndexRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2.Index.FromString,
- )
- self.DeleteIndex = channel.unary_unary(
- "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex",
- request_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.DeleteIndexRequest.SerializeToString,
- response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
- )
- self.GetField = channel.unary_unary(
- "/google.firestore.admin.v1.FirestoreAdmin/GetField",
- request_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.GetFieldRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_field__pb2.Field.FromString,
- )
- self.UpdateField = channel.unary_unary(
- "/google.firestore.admin.v1.FirestoreAdmin/UpdateField",
- request_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.UpdateFieldRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.ListFields = channel.unary_unary(
- "/google.firestore.admin.v1.FirestoreAdmin/ListFields",
- request_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsResponse.FromString,
- )
- self.ExportDocuments = channel.unary_unary(
- "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments",
- request_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ExportDocumentsRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.ImportDocuments = channel.unary_unary(
- "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments",
- request_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ImportDocumentsRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
-
-
-class FirestoreAdminServicer(object):
- """Operations are created by service `FirestoreAdmin`, but are accessed via
- service `google.longrunning.Operations`.
- """
-
- def CreateIndex(self, request, context):
- """Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation]
- which may be used to track the status of the creation. The metadata for
- the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ListIndexes(self, request, context):
- """Lists composite indexes.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def GetIndex(self, request, context):
- """Gets a composite index.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def DeleteIndex(self, request, context):
- """Deletes a composite index.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def GetField(self, request, context):
- """Gets the metadata and configuration for a Field.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def UpdateField(self, request, context):
- """Updates a field configuration. Currently, field updates apply only to
- single field index configuration. However, calls to
- [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] should provide a field mask to avoid
- changing any configuration that the caller isn't aware of. The field mask
- should be specified as: `{ paths: "index_config" }`.
-
- This call returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to
- track the status of the field update. The metadata for
- the operation will be the type [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata].
-
- To configure the default field settings for the database, use
- the special `Field` with resource name:
- `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ListFields(self, request, context):
- """Lists the field configuration and metadata for this database.
-
- Currently, [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields
- that have been explicitly overridden. To issue this query, call
- [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to
- `indexConfig.usesAncestorConfig:false`.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ExportDocuments(self, request, context):
- """Exports a copy of all or a subset of documents from Google Cloud Firestore
- to another storage system, such as Google Cloud Storage. Recent updates to
- documents may not be reflected in the export. The export occurs in the
- background and its progress can be monitored and managed via the
- Operation resource that is created. The output of an export may only be
- used once the associated operation is done. If an export operation is
- cancelled before completion it may leave partial data behind in Google
- Cloud Storage.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ImportDocuments(self, request, context):
- """Imports documents into Google Cloud Firestore. Existing documents with the
- same name are overwritten. The import occurs in the background and its
- progress can be monitored and managed via the Operation resource that is
- created. If an ImportDocuments operation is cancelled, it is possible
- that a subset of the data has already been imported to Cloud Firestore.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
-
-def add_FirestoreAdminServicer_to_server(servicer, server):
- rpc_method_handlers = {
- "CreateIndex": grpc.unary_unary_rpc_method_handler(
- servicer.CreateIndex,
- request_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.CreateIndexRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "ListIndexes": grpc.unary_unary_rpc_method_handler(
- servicer.ListIndexes,
- request_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ListIndexesResponse.SerializeToString,
- ),
- "GetIndex": grpc.unary_unary_rpc_method_handler(
- servicer.GetIndex,
- request_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.GetIndexRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2.Index.SerializeToString,
- ),
- "DeleteIndex": grpc.unary_unary_rpc_method_handler(
- servicer.DeleteIndex,
- request_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.DeleteIndexRequest.FromString,
- response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
- ),
- "GetField": grpc.unary_unary_rpc_method_handler(
- servicer.GetField,
- request_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.GetFieldRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_field__pb2.Field.SerializeToString,
- ),
- "UpdateField": grpc.unary_unary_rpc_method_handler(
- servicer.UpdateField,
- request_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.UpdateFieldRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "ListFields": grpc.unary_unary_rpc_method_handler(
- servicer.ListFields,
- request_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ListFieldsResponse.SerializeToString,
- ),
- "ExportDocuments": grpc.unary_unary_rpc_method_handler(
- servicer.ExportDocuments,
- request_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ExportDocumentsRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "ImportDocuments": grpc.unary_unary_rpc_method_handler(
- servicer.ImportDocuments,
- request_deserializer=google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_firestore__admin__pb2.ImportDocumentsRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- "google.firestore.admin.v1.FirestoreAdmin", rpc_method_handlers
- )
- server.add_generic_rpc_handlers((generic_handler,))
diff --git a/google/cloud/firestore_admin_v1/proto/index.proto b/google/cloud/firestore_admin_v1/proto/index.proto
deleted file mode 100644
index 4b9c6e35b1..0000000000
--- a/google/cloud/firestore_admin_v1/proto/index.proto
+++ /dev/null
@@ -1,157 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.admin.v1;
-
-import "google/api/resource.proto";
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
-option java_multiple_files = true;
-option java_outer_classname = "IndexProto";
-option java_package = "com.google.firestore.admin.v1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
-
-// Cloud Firestore indexes enable simple and complex queries against
-// documents in a database.
-message Index {
- option (google.api.resource) = {
- type: "firestore.googleapis.com/Index"
- pattern: "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}"
- };
-
- // A field in an index.
- // The field_path describes which field is indexed, the value_mode describes
- // how the field value is indexed.
- message IndexField {
- // The supported orderings.
- enum Order {
- // The ordering is unspecified. Not a valid option.
- ORDER_UNSPECIFIED = 0;
-
- // The field is ordered by ascending field value.
- ASCENDING = 1;
-
- // The field is ordered by descending field value.
- DESCENDING = 2;
- }
-
- // The supported array value configurations.
- enum ArrayConfig {
- // The index does not support additional array queries.
- ARRAY_CONFIG_UNSPECIFIED = 0;
-
- // The index supports array containment queries.
- CONTAINS = 1;
- }
-
- // Can be __name__.
- // For single field indexes, this must match the name of the field or may
- // be omitted.
- string field_path = 1;
-
- // How the field value is indexed.
- oneof value_mode {
- // Indicates that this field supports ordering by the specified order or
- // comparing using =, <, <=, >, >=.
- Order order = 2;
-
- // Indicates that this field supports operations on `array_value`s.
- ArrayConfig array_config = 3;
- }
- }
-
- // Query Scope defines the scope at which a query is run. This is specified on
- // a StructuredQuery's `from` field.
- enum QueryScope {
- // The query scope is unspecified. Not a valid option.
- QUERY_SCOPE_UNSPECIFIED = 0;
-
- // Indexes with a collection query scope specified allow queries
- // against a collection that is the child of a specific document, specified
- // at query time, and that has the collection id specified by the index.
- COLLECTION = 1;
-
- // Indexes with a collection group query scope specified allow queries
- // against all collections that has the collection id specified by the
- // index.
- COLLECTION_GROUP = 2;
- }
-
- // The state of an index. During index creation, an index will be in the
- // `CREATING` state. If the index is created successfully, it will transition
- // to the `READY` state. If the index creation encounters a problem, the index
- // will transition to the `NEEDS_REPAIR` state.
- enum State {
- // The state is unspecified.
- STATE_UNSPECIFIED = 0;
-
- // The index is being created.
- // There is an active long-running operation for the index.
- // The index is updated when writing a document.
- // Some index data may exist.
- CREATING = 1;
-
- // The index is ready to be used.
- // The index is updated when writing a document.
- // The index is fully populated from all stored documents it applies to.
- READY = 2;
-
- // The index was being created, but something went wrong.
- // There is no active long-running operation for the index,
- // and the most recently finished long-running operation failed.
- // The index is not updated when writing a document.
- // Some index data may exist.
- // Use the google.longrunning.Operations API to determine why the operation
- // that last attempted to create this index failed, then re-create the
- // index.
- NEEDS_REPAIR = 3;
- }
-
- // Output only. A server defined name for this index.
- // The form of this name for composite indexes will be:
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}`
- // For single field indexes, this field will be empty.
- string name = 1;
-
- // Indexes with a collection query scope specified allow queries
- // against a collection that is the child of a specific document, specified at
- // query time, and that has the same collection id.
- //
- // Indexes with a collection group query scope specified allow queries against
- // all collections descended from a specific document, specified at query
- // time, and that have the same collection id as this index.
- QueryScope query_scope = 2;
-
- // The fields supported by this index.
- //
- // For composite indexes, this is always 2 or more fields.
- // The last field entry is always for the field path `__name__`. If, on
- // creation, `__name__` was not specified as the last field, it will be added
- // automatically with the same direction as that of the last field defined. If
- // the final field in a composite index is not directional, the `__name__`
- // will be ordered ASCENDING (unless explicitly specified).
- //
- // For single field indexes, this will always be exactly one entry with a
- // field path equal to the field path of the associated field.
- repeated IndexField fields = 3;
-
- // Output only. The serving state of the index.
- State state = 4;
-}
diff --git a/google/cloud/firestore_admin_v1/proto/index_pb2.py b/google/cloud/firestore_admin_v1/proto/index_pb2.py
deleted file mode 100644
index 85356236dd..0000000000
--- a/google/cloud/firestore_admin_v1/proto/index_pb2.py
+++ /dev/null
@@ -1,429 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore/admin_v1/proto/index.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore/admin_v1/proto/index.proto",
- package="google.firestore.admin.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\035com.google.firestore.admin.v1B\nIndexProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1"
- ),
- serialized_pb=_b(
- '\n1google/cloud/firestore/admin_v1/proto/index.proto\x12\x19google.firestore.admin.v1\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto"\xa3\x06\n\x05Index\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x0bquery_scope\x18\x02 \x01(\x0e\x32+.google.firestore.admin.v1.Index.QueryScope\x12;\n\x06\x66ields\x18\x03 \x03(\x0b\x32+.google.firestore.admin.v1.Index.IndexField\x12\x35\n\x05state\x18\x04 \x01(\x0e\x32&.google.firestore.admin.v1.Index.State\x1a\xbd\x02\n\nIndexField\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x42\n\x05order\x18\x02 \x01(\x0e\x32\x31.google.firestore.admin.v1.Index.IndexField.OrderH\x00\x12O\n\x0c\x61rray_config\x18\x03 \x01(\x0e\x32\x37.google.firestore.admin.v1.Index.IndexField.ArrayConfigH\x00"=\n\x05Order\x12\x15\n\x11ORDER_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"9\n\x0b\x41rrayConfig\x12\x1c\n\x18\x41RRAY_CONFIG_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43ONTAINS\x10\x01\x42\x0c\n\nvalue_mode"O\n\nQueryScope\x12\x1b\n\x17QUERY_SCOPE_UNSPECIFIED\x10\x00\x12\x0e\n\nCOLLECTION\x10\x01\x12\x14\n\x10\x43OLLECTION_GROUP\x10\x02"I\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x10\n\x0cNEEDS_REPAIR\x10\x03:z\xea\x41w\n\x1e\x66irestore.googleapis.com/Index\x12Uprojects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}B\xb8\x01\n\x1d\x63om.google.firestore.admin.v1B\nIndexProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1b\x06proto3'
- ),
- dependencies=[
- google_dot_api_dot_resource__pb2.DESCRIPTOR,
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- ],
-)
-
-
-_INDEX_INDEXFIELD_ORDER = _descriptor.EnumDescriptor(
- name="Order",
- full_name="google.firestore.admin.v1.Index.IndexField.Order",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="ORDER_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="ASCENDING", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="DESCENDING", index=2, number=2, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=527,
- serialized_end=588,
-)
-_sym_db.RegisterEnumDescriptor(_INDEX_INDEXFIELD_ORDER)
-
-_INDEX_INDEXFIELD_ARRAYCONFIG = _descriptor.EnumDescriptor(
- name="ArrayConfig",
- full_name="google.firestore.admin.v1.Index.IndexField.ArrayConfig",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="ARRAY_CONFIG_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="CONTAINS", index=1, number=1, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=590,
- serialized_end=647,
-)
-_sym_db.RegisterEnumDescriptor(_INDEX_INDEXFIELD_ARRAYCONFIG)
-
-_INDEX_QUERYSCOPE = _descriptor.EnumDescriptor(
- name="QueryScope",
- full_name="google.firestore.admin.v1.Index.QueryScope",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="QUERY_SCOPE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="COLLECTION", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="COLLECTION_GROUP",
- index=2,
- number=2,
- serialized_options=None,
- type=None,
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=663,
- serialized_end=742,
-)
-_sym_db.RegisterEnumDescriptor(_INDEX_QUERYSCOPE)
-
-_INDEX_STATE = _descriptor.EnumDescriptor(
- name="State",
- full_name="google.firestore.admin.v1.Index.State",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="STATE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="CREATING", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="READY", index=2, number=2, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="NEEDS_REPAIR", index=3, number=3, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=744,
- serialized_end=817,
-)
-_sym_db.RegisterEnumDescriptor(_INDEX_STATE)
-
-
-_INDEX_INDEXFIELD = _descriptor.Descriptor(
- name="IndexField",
- full_name="google.firestore.admin.v1.Index.IndexField",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field_path",
- full_name="google.firestore.admin.v1.Index.IndexField.field_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="order",
- full_name="google.firestore.admin.v1.Index.IndexField.order",
- index=1,
- number=2,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="array_config",
- full_name="google.firestore.admin.v1.Index.IndexField.array_config",
- index=2,
- number=3,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_INDEX_INDEXFIELD_ORDER, _INDEX_INDEXFIELD_ARRAYCONFIG],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="value_mode",
- full_name="google.firestore.admin.v1.Index.IndexField.value_mode",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=344,
- serialized_end=661,
-)
-
-_INDEX = _descriptor.Descriptor(
- name="Index",
- full_name="google.firestore.admin.v1.Index",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.admin.v1.Index.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="query_scope",
- full_name="google.firestore.admin.v1.Index.query_scope",
- index=1,
- number=2,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="google.firestore.admin.v1.Index.fields",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="state",
- full_name="google.firestore.admin.v1.Index.state",
- index=3,
- number=4,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_INDEX_INDEXFIELD],
- enum_types=[_INDEX_QUERYSCOPE, _INDEX_STATE],
- serialized_options=_b(
- "\352Aw\n\036firestore.googleapis.com/Index\022Uprojects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}"
- ),
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=138,
- serialized_end=941,
-)
-
-_INDEX_INDEXFIELD.fields_by_name["order"].enum_type = _INDEX_INDEXFIELD_ORDER
-_INDEX_INDEXFIELD.fields_by_name[
- "array_config"
-].enum_type = _INDEX_INDEXFIELD_ARRAYCONFIG
-_INDEX_INDEXFIELD.containing_type = _INDEX
-_INDEX_INDEXFIELD_ORDER.containing_type = _INDEX_INDEXFIELD
-_INDEX_INDEXFIELD_ARRAYCONFIG.containing_type = _INDEX_INDEXFIELD
-_INDEX_INDEXFIELD.oneofs_by_name["value_mode"].fields.append(
- _INDEX_INDEXFIELD.fields_by_name["order"]
-)
-_INDEX_INDEXFIELD.fields_by_name[
- "order"
-].containing_oneof = _INDEX_INDEXFIELD.oneofs_by_name["value_mode"]
-_INDEX_INDEXFIELD.oneofs_by_name["value_mode"].fields.append(
- _INDEX_INDEXFIELD.fields_by_name["array_config"]
-)
-_INDEX_INDEXFIELD.fields_by_name[
- "array_config"
-].containing_oneof = _INDEX_INDEXFIELD.oneofs_by_name["value_mode"]
-_INDEX.fields_by_name["query_scope"].enum_type = _INDEX_QUERYSCOPE
-_INDEX.fields_by_name["fields"].message_type = _INDEX_INDEXFIELD
-_INDEX.fields_by_name["state"].enum_type = _INDEX_STATE
-_INDEX_QUERYSCOPE.containing_type = _INDEX
-_INDEX_STATE.containing_type = _INDEX
-DESCRIPTOR.message_types_by_name["Index"] = _INDEX
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-Index = _reflection.GeneratedProtocolMessageType(
- "Index",
- (_message.Message,),
- dict(
- IndexField=_reflection.GeneratedProtocolMessageType(
- "IndexField",
- (_message.Message,),
- dict(
- DESCRIPTOR=_INDEX_INDEXFIELD,
- __module__="google.cloud.firestore.admin_v1.proto.index_pb2",
- __doc__="""A field in an index. The field\_path describes which field
- is indexed, the value\_mode describes how the field value is indexed.
-
-
- Attributes:
- field_path:
- Can be **name**. For single field indexes, this must match the
- name of the field or may be omitted.
- value_mode:
- How the field value is indexed.
- order:
- Indicates that this field supports ordering by the specified
- order or comparing using =, <, <=, >, >=.
- array_config:
- Indicates that this field supports operations on
- ``array_value``\ s.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Index.IndexField)
- ),
- ),
- DESCRIPTOR=_INDEX,
- __module__="google.cloud.firestore.admin_v1.proto.index_pb2",
- __doc__="""Cloud Firestore indexes enable simple and complex queries
- against documents in a database.
-
-
- Attributes:
- name:
- Output only. A server defined name for this index. The form of
- this name for composite indexes will be: ``projects/{project_i
- d}/databases/{database_id}/collectionGroups/{collection_id}/in
- dexes/{composite_index_id}`` For single field indexes, this
- field will be empty.
- query_scope:
- Indexes with a collection query scope specified allow queries
- against a collection that is the child of a specific document,
- specified at query time, and that has the same collection id.
- Indexes with a collection group query scope specified allow
- queries against all collections descended from a specific
- document, specified at query time, and that have the same
- collection id as this index.
- fields:
- The fields supported by this index. For composite indexes,
- this is always 2 or more fields. The last field entry is
- always for the field path ``__name__``. If, on creation,
- ``__name__`` was not specified as the last field, it will be
- added automatically with the same direction as that of the
- last field defined. If the final field in a composite index is
- not directional, the ``__name__`` will be ordered ASCENDING
- (unless explicitly specified). For single field indexes, this
- will always be exactly one entry with a field path equal to
- the field path of the associated field.
- state:
- Output only. The serving state of the index.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Index)
- ),
-)
-_sym_db.RegisterMessage(Index)
-_sym_db.RegisterMessage(Index.IndexField)
-
-
-DESCRIPTOR._options = None
-_INDEX._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_admin_v1/proto/index_pb2_grpc.py b/google/cloud/firestore_admin_v1/proto/index_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_admin_v1/proto/index_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_admin_v1/proto/location.proto b/google/cloud/firestore_admin_v1/proto/location.proto
deleted file mode 100644
index d9dc6f9b98..0000000000
--- a/google/cloud/firestore_admin_v1/proto/location.proto
+++ /dev/null
@@ -1,34 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.admin.v1;
-
-import "google/type/latlng.proto";
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
-option java_multiple_files = true;
-option java_outer_classname = "LocationProto";
-option java_package = "com.google.firestore.admin.v1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
-
-// The metadata message for [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata].
-message LocationMetadata {
-
-}
diff --git a/google/cloud/firestore_admin_v1/proto/location_pb2.py b/google/cloud/firestore_admin_v1/proto/location_pb2.py
deleted file mode 100644
index 7825895411..0000000000
--- a/google/cloud/firestore_admin_v1/proto/location_pb2.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore/admin_v1/proto/location.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore/admin_v1/proto/location.proto",
- package="google.firestore.admin.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\035com.google.firestore.admin.v1B\rLocationProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1"
- ),
- serialized_pb=_b(
- '\n4google/cloud/firestore/admin_v1/proto/location.proto\x12\x19google.firestore.admin.v1\x1a\x18google/type/latlng.proto\x1a\x1cgoogle/api/annotations.proto"\x12\n\x10LocationMetadataB\xbb\x01\n\x1d\x63om.google.firestore.admin.v1B\rLocationProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1b\x06proto3'
- ),
- dependencies=[
- google_dot_type_dot_latlng__pb2.DESCRIPTOR,
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- ],
-)
-
-
-_LOCATIONMETADATA = _descriptor.Descriptor(
- name="LocationMetadata",
- full_name="google.firestore.admin.v1.LocationMetadata",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=139,
- serialized_end=157,
-)
-
-DESCRIPTOR.message_types_by_name["LocationMetadata"] = _LOCATIONMETADATA
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-LocationMetadata = _reflection.GeneratedProtocolMessageType(
- "LocationMetadata",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LOCATIONMETADATA,
- __module__="google.cloud.firestore.admin_v1.proto.location_pb2",
- __doc__="""The metadata message for
- [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata].
-
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.LocationMetadata)
- ),
-)
-_sym_db.RegisterMessage(LocationMetadata)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_admin_v1/proto/location_pb2_grpc.py b/google/cloud/firestore_admin_v1/proto/location_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_admin_v1/proto/location_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_admin_v1/proto/operation.proto b/google/cloud/firestore_admin_v1/proto/operation.proto
deleted file mode 100644
index 08194fe093..0000000000
--- a/google/cloud/firestore_admin_v1/proto/operation.proto
+++ /dev/null
@@ -1,203 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.admin.v1;
-
-import "google/firestore/admin/v1/index.proto";
-import "google/protobuf/timestamp.proto";
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.Admin.V1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin";
-option java_multiple_files = true;
-option java_outer_classname = "OperationProto";
-option java_package = "com.google.firestore.admin.v1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1";
-
-// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
-// [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
-message IndexOperationMetadata {
- // The time this operation started.
- google.protobuf.Timestamp start_time = 1;
-
- // The time this operation completed. Will be unset if operation still in
- // progress.
- google.protobuf.Timestamp end_time = 2;
-
- // The index resource that this operation is acting on. For example:
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
- string index = 3;
-
- // The state of the operation.
- OperationState state = 4;
-
- // The progress, in documents, of this operation.
- Progress progress_documents = 5;
-
- // The progress, in bytes, of this operation.
- Progress progress_bytes = 6;
-}
-
-// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
-// [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
-message FieldOperationMetadata {
- // Information about an index configuration change.
- message IndexConfigDelta {
- // Specifies how the index is changing.
- enum ChangeType {
- // The type of change is not specified or known.
- CHANGE_TYPE_UNSPECIFIED = 0;
-
- // The single field index is being added.
- ADD = 1;
-
- // The single field index is being removed.
- REMOVE = 2;
- }
-
- // Specifies how the index is changing.
- ChangeType change_type = 1;
-
- // The index being changed.
- Index index = 2;
- }
-
- // The time this operation started.
- google.protobuf.Timestamp start_time = 1;
-
- // The time this operation completed. Will be unset if operation still in
- // progress.
- google.protobuf.Timestamp end_time = 2;
-
- // The field resource that this operation is acting on. For example:
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
- string field = 3;
-
- // A list of [IndexConfigDelta][google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta], which describe the intent of this
- // operation.
- repeated IndexConfigDelta index_config_deltas = 4;
-
- // The state of the operation.
- OperationState state = 5;
-
- // The progress, in documents, of this operation.
- Progress progress_documents = 6;
-
- // The progress, in bytes, of this operation.
- Progress progress_bytes = 7;
-}
-
-// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
-// [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
-message ExportDocumentsMetadata {
- // The time this operation started.
- google.protobuf.Timestamp start_time = 1;
-
- // The time this operation completed. Will be unset if operation still in
- // progress.
- google.protobuf.Timestamp end_time = 2;
-
- // The state of the export operation.
- OperationState operation_state = 3;
-
- // The progress, in documents, of this operation.
- Progress progress_documents = 4;
-
- // The progress, in bytes, of this operation.
- Progress progress_bytes = 5;
-
- // Which collection ids are being exported.
- repeated string collection_ids = 6;
-
- // Where the entities are being exported to.
- string output_uri_prefix = 7;
-}
-
-// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
-// [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
-message ImportDocumentsMetadata {
- // The time this operation started.
- google.protobuf.Timestamp start_time = 1;
-
- // The time this operation completed. Will be unset if operation still in
- // progress.
- google.protobuf.Timestamp end_time = 2;
-
- // The state of the import operation.
- OperationState operation_state = 3;
-
- // The progress, in documents, of this operation.
- Progress progress_documents = 4;
-
- // The progress, in bytes, of this operation.
- Progress progress_bytes = 5;
-
- // Which collection ids are being imported.
- repeated string collection_ids = 6;
-
- // The location of the documents being imported.
- string input_uri_prefix = 7;
-}
-
-// Returned in the [google.longrunning.Operation][google.longrunning.Operation] response field.
-message ExportDocumentsResponse {
- // Location of the output files. This can be used to begin an import
- // into Cloud Firestore (this project or another project) after the operation
- // completes successfully.
- string output_uri_prefix = 1;
-}
-
-// Describes the progress of the operation.
-// Unit of work is generic and must be interpreted based on where [Progress][google.firestore.admin.v1.Progress]
-// is used.
-message Progress {
- // The amount of work estimated.
- int64 estimated_work = 1;
-
- // The amount of work completed.
- int64 completed_work = 2;
-}
-
-// Describes the state of the operation.
-enum OperationState {
- // Unspecified.
- OPERATION_STATE_UNSPECIFIED = 0;
-
- // Request is being prepared for processing.
- INITIALIZING = 1;
-
- // Request is actively being processed.
- PROCESSING = 2;
-
- // Request is in the process of being cancelled after user called
- // google.longrunning.Operations.CancelOperation on the operation.
- CANCELLING = 3;
-
- // Request has been processed and is in its finalization stage.
- FINALIZING = 4;
-
- // Request has completed successfully.
- SUCCESSFUL = 5;
-
- // Request has finished being processed, but encountered an error.
- FAILED = 6;
-
- // Request has finished being cancelled after user called
- // google.longrunning.Operations.CancelOperation.
- CANCELLED = 7;
-}
diff --git a/google/cloud/firestore_admin_v1/proto/operation_pb2.py b/google/cloud/firestore_admin_v1/proto/operation_pb2.py
deleted file mode 100644
index d34dd007f0..0000000000
--- a/google/cloud/firestore_admin_v1/proto/operation_pb2.py
+++ /dev/null
@@ -1,1110 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore/admin_v1/proto/operation.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.cloud.firestore_admin_v1.proto import (
- index_pb2 as google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2,
-)
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore/admin_v1/proto/operation.proto",
- package="google.firestore.admin.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\035com.google.firestore.admin.v1B\016OperationProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1"
- ),
- serialized_pb=_b(
- '\n5google/cloud/firestore/admin_v1/proto/operation.proto\x12\x19google.firestore.admin.v1\x1a\x31google/cloud/firestore/admin_v1/proto/index.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xbd\x02\n\x16IndexOperationMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05index\x18\x03 \x01(\t\x12\x38\n\x05state\x18\x04 \x01(\x0e\x32).google.firestore.admin.v1.OperationState\x12?\n\x12progress_documents\x18\x05 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x06 \x01(\x0b\x32#.google.firestore.admin.v1.Progress"\x88\x05\n\x16\x46ieldOperationMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05\x66ield\x18\x03 \x01(\t\x12_\n\x13index_config_deltas\x18\x04 \x03(\x0b\x32\x42.google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta\x12\x38\n\x05state\x18\x05 \x01(\x0e\x32).google.firestore.admin.v1.OperationState\x12?\n\x12progress_documents\x18\x06 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x07 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x1a\xe7\x01\n\x10IndexConfigDelta\x12\x62\n\x0b\x63hange_type\x18\x01 \x01(\x0e\x32M.google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta.ChangeType\x12/\n\x05index\x18\x02 \x01(\x0b\x32 .google.firestore.admin.v1.Index">\n\nChangeType\x12\x1b\n\x17\x43HANGE_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02"\xec\x02\n\x17\x45xportDocumentsMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x42\n\x0foperation_state\x18\x03 \x01(\x0e\x32).google.firestore.admin.v1.OperationState\x12?\n\x12progress_documents\x18\x04 
\x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x05 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12\x16\n\x0e\x63ollection_ids\x18\x06 \x03(\t\x12\x19\n\x11output_uri_prefix\x18\x07 \x01(\t"\xeb\x02\n\x17ImportDocumentsMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x42\n\x0foperation_state\x18\x03 \x01(\x0e\x32).google.firestore.admin.v1.OperationState\x12?\n\x12progress_documents\x18\x04 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x05 \x01(\x0b\x32#.google.firestore.admin.v1.Progress\x12\x16\n\x0e\x63ollection_ids\x18\x06 \x03(\t\x12\x18\n\x10input_uri_prefix\x18\x07 \x01(\t"4\n\x17\x45xportDocumentsResponse\x12\x19\n\x11output_uri_prefix\x18\x01 \x01(\t":\n\x08Progress\x12\x16\n\x0e\x65stimated_work\x18\x01 \x01(\x03\x12\x16\n\x0e\x63ompleted_work\x18\x02 \x01(\x03*\x9e\x01\n\x0eOperationState\x12\x1f\n\x1bOPERATION_STATE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINITIALIZING\x10\x01\x12\x0e\n\nPROCESSING\x10\x02\x12\x0e\n\nCANCELLING\x10\x03\x12\x0e\n\nFINALIZING\x10\x04\x12\x0e\n\nSUCCESSFUL\x10\x05\x12\n\n\x06\x46\x41ILED\x10\x06\x12\r\n\tCANCELLED\x10\x07\x42\xbc\x01\n\x1d\x63om.google.firestore.admin.v1B\x0eOperationProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1b\x06proto3'
- ),
- dependencies=[
- google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- ],
-)
-
-_OPERATIONSTATE = _descriptor.EnumDescriptor(
- name="OperationState",
- full_name="google.firestore.admin.v1.OperationState",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="OPERATION_STATE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="INITIALIZING", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="PROCESSING", index=2, number=2, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="CANCELLING", index=3, number=3, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="FINALIZING", index=4, number=4, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="SUCCESSFUL", index=5, number=5, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="FAILED", index=6, number=6, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="CANCELLED", index=7, number=7, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=2017,
- serialized_end=2175,
-)
-_sym_db.RegisterEnumDescriptor(_OPERATIONSTATE)
-
-OperationState = enum_type_wrapper.EnumTypeWrapper(_OPERATIONSTATE)
-OPERATION_STATE_UNSPECIFIED = 0
-INITIALIZING = 1
-PROCESSING = 2
-CANCELLING = 3
-FINALIZING = 4
-SUCCESSFUL = 5
-FAILED = 6
-CANCELLED = 7
-
-
-_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA_CHANGETYPE = _descriptor.EnumDescriptor(
- name="ChangeType",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta.ChangeType",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="CHANGE_TYPE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="ADD", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="REMOVE", index=2, number=2, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1105,
- serialized_end=1167,
-)
-_sym_db.RegisterEnumDescriptor(_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA_CHANGETYPE)
-
-
-_INDEXOPERATIONMETADATA = _descriptor.Descriptor(
- name="IndexOperationMetadata",
- full_name="google.firestore.admin.v1.IndexOperationMetadata",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="start_time",
- full_name="google.firestore.admin.v1.IndexOperationMetadata.start_time",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_time",
- full_name="google.firestore.admin.v1.IndexOperationMetadata.end_time",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="index",
- full_name="google.firestore.admin.v1.IndexOperationMetadata.index",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="state",
- full_name="google.firestore.admin.v1.IndexOperationMetadata.state",
- index=3,
- number=4,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="progress_documents",
- full_name="google.firestore.admin.v1.IndexOperationMetadata.progress_documents",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="progress_bytes",
- full_name="google.firestore.admin.v1.IndexOperationMetadata.progress_bytes",
- index=5,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=199,
- serialized_end=516,
-)
-
-
-_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA = _descriptor.Descriptor(
- name="IndexConfigDelta",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="change_type",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta.change_type",
- index=0,
- number=1,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="index",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta.index",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA_CHANGETYPE],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=936,
- serialized_end=1167,
-)
-
-_FIELDOPERATIONMETADATA = _descriptor.Descriptor(
- name="FieldOperationMetadata",
- full_name="google.firestore.admin.v1.FieldOperationMetadata",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="start_time",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.start_time",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_time",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.end_time",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="field",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.field",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="index_config_deltas",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.index_config_deltas",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="state",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.state",
- index=4,
- number=5,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="progress_documents",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.progress_documents",
- index=5,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="progress_bytes",
- full_name="google.firestore.admin.v1.FieldOperationMetadata.progress_bytes",
- index=6,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=519,
- serialized_end=1167,
-)
-
-
-_EXPORTDOCUMENTSMETADATA = _descriptor.Descriptor(
- name="ExportDocumentsMetadata",
- full_name="google.firestore.admin.v1.ExportDocumentsMetadata",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="start_time",
- full_name="google.firestore.admin.v1.ExportDocumentsMetadata.start_time",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_time",
- full_name="google.firestore.admin.v1.ExportDocumentsMetadata.end_time",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="operation_state",
- full_name="google.firestore.admin.v1.ExportDocumentsMetadata.operation_state",
- index=2,
- number=3,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="progress_documents",
- full_name="google.firestore.admin.v1.ExportDocumentsMetadata.progress_documents",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="progress_bytes",
- full_name="google.firestore.admin.v1.ExportDocumentsMetadata.progress_bytes",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="collection_ids",
- full_name="google.firestore.admin.v1.ExportDocumentsMetadata.collection_ids",
- index=5,
- number=6,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="output_uri_prefix",
- full_name="google.firestore.admin.v1.ExportDocumentsMetadata.output_uri_prefix",
- index=6,
- number=7,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1170,
- serialized_end=1534,
-)
-
-
-_IMPORTDOCUMENTSMETADATA = _descriptor.Descriptor(
- name="ImportDocumentsMetadata",
- full_name="google.firestore.admin.v1.ImportDocumentsMetadata",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="start_time",
- full_name="google.firestore.admin.v1.ImportDocumentsMetadata.start_time",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_time",
- full_name="google.firestore.admin.v1.ImportDocumentsMetadata.end_time",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="operation_state",
- full_name="google.firestore.admin.v1.ImportDocumentsMetadata.operation_state",
- index=2,
- number=3,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="progress_documents",
- full_name="google.firestore.admin.v1.ImportDocumentsMetadata.progress_documents",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="progress_bytes",
- full_name="google.firestore.admin.v1.ImportDocumentsMetadata.progress_bytes",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="collection_ids",
- full_name="google.firestore.admin.v1.ImportDocumentsMetadata.collection_ids",
- index=5,
- number=6,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="input_uri_prefix",
- full_name="google.firestore.admin.v1.ImportDocumentsMetadata.input_uri_prefix",
- index=6,
- number=7,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1537,
- serialized_end=1900,
-)
-
-
-_EXPORTDOCUMENTSRESPONSE = _descriptor.Descriptor(
- name="ExportDocumentsResponse",
- full_name="google.firestore.admin.v1.ExportDocumentsResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="output_uri_prefix",
- full_name="google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1902,
- serialized_end=1954,
-)
-
-
-_PROGRESS = _descriptor.Descriptor(
- name="Progress",
- full_name="google.firestore.admin.v1.Progress",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="estimated_work",
- full_name="google.firestore.admin.v1.Progress.estimated_work",
- index=0,
- number=1,
- type=3,
- cpp_type=2,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="completed_work",
- full_name="google.firestore.admin.v1.Progress.completed_work",
- index=1,
- number=2,
- type=3,
- cpp_type=2,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1956,
- serialized_end=2014,
-)
-
-_INDEXOPERATIONMETADATA.fields_by_name[
- "start_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_INDEXOPERATIONMETADATA.fields_by_name[
- "end_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_INDEXOPERATIONMETADATA.fields_by_name["state"].enum_type = _OPERATIONSTATE
-_INDEXOPERATIONMETADATA.fields_by_name["progress_documents"].message_type = _PROGRESS
-_INDEXOPERATIONMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS
-_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA.fields_by_name[
- "change_type"
-].enum_type = _FIELDOPERATIONMETADATA_INDEXCONFIGDELTA_CHANGETYPE
-_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA.fields_by_name[
- "index"
-].message_type = (
- google_dot_cloud_dot_firestore_dot_admin__v1_dot_proto_dot_index__pb2._INDEX
-)
-_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA.containing_type = _FIELDOPERATIONMETADATA
-_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA_CHANGETYPE.containing_type = (
- _FIELDOPERATIONMETADATA_INDEXCONFIGDELTA
-)
-_FIELDOPERATIONMETADATA.fields_by_name[
- "start_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_FIELDOPERATIONMETADATA.fields_by_name[
- "end_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_FIELDOPERATIONMETADATA.fields_by_name[
- "index_config_deltas"
-].message_type = _FIELDOPERATIONMETADATA_INDEXCONFIGDELTA
-_FIELDOPERATIONMETADATA.fields_by_name["state"].enum_type = _OPERATIONSTATE
-_FIELDOPERATIONMETADATA.fields_by_name["progress_documents"].message_type = _PROGRESS
-_FIELDOPERATIONMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS
-_EXPORTDOCUMENTSMETADATA.fields_by_name[
- "start_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_EXPORTDOCUMENTSMETADATA.fields_by_name[
- "end_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_EXPORTDOCUMENTSMETADATA.fields_by_name["operation_state"].enum_type = _OPERATIONSTATE
-_EXPORTDOCUMENTSMETADATA.fields_by_name["progress_documents"].message_type = _PROGRESS
-_EXPORTDOCUMENTSMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS
-_IMPORTDOCUMENTSMETADATA.fields_by_name[
- "start_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_IMPORTDOCUMENTSMETADATA.fields_by_name[
- "end_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_IMPORTDOCUMENTSMETADATA.fields_by_name["operation_state"].enum_type = _OPERATIONSTATE
-_IMPORTDOCUMENTSMETADATA.fields_by_name["progress_documents"].message_type = _PROGRESS
-_IMPORTDOCUMENTSMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS
-DESCRIPTOR.message_types_by_name["IndexOperationMetadata"] = _INDEXOPERATIONMETADATA
-DESCRIPTOR.message_types_by_name["FieldOperationMetadata"] = _FIELDOPERATIONMETADATA
-DESCRIPTOR.message_types_by_name["ExportDocumentsMetadata"] = _EXPORTDOCUMENTSMETADATA
-DESCRIPTOR.message_types_by_name["ImportDocumentsMetadata"] = _IMPORTDOCUMENTSMETADATA
-DESCRIPTOR.message_types_by_name["ExportDocumentsResponse"] = _EXPORTDOCUMENTSRESPONSE
-DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS
-DESCRIPTOR.enum_types_by_name["OperationState"] = _OPERATIONSTATE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-IndexOperationMetadata = _reflection.GeneratedProtocolMessageType(
- "IndexOperationMetadata",
- (_message.Message,),
- dict(
- DESCRIPTOR=_INDEXOPERATIONMETADATA,
- __module__="google.cloud.firestore.admin_v1.proto.operation_pb2",
- __doc__="""Metadata for
- [google.longrunning.Operation][google.longrunning.Operation] results
- from
- [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
-
-
- Attributes:
- start_time:
- The time this operation started.
- end_time:
- The time this operation completed. Will be unset if operation
- still in progress.
- index:
- The index resource that this operation is acting on. For
- example: ``projects/{project_id}/databases/{database_id}/colle
- ctionGroups/{collection_id}/indexes/{index_id}``
- state:
- The state of the operation.
- progress_documents:
- The progress, in documents, of this operation.
- progress_bytes:
- The progress, in bytes, of this operation.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.IndexOperationMetadata)
- ),
-)
-_sym_db.RegisterMessage(IndexOperationMetadata)
-
-FieldOperationMetadata = _reflection.GeneratedProtocolMessageType(
- "FieldOperationMetadata",
- (_message.Message,),
- dict(
- IndexConfigDelta=_reflection.GeneratedProtocolMessageType(
- "IndexConfigDelta",
- (_message.Message,),
- dict(
- DESCRIPTOR=_FIELDOPERATIONMETADATA_INDEXCONFIGDELTA,
- __module__="google.cloud.firestore.admin_v1.proto.operation_pb2",
- __doc__="""Information about an index configuration change.
-
-
- Attributes:
- change_type:
- Specifies how the index is changing.
- index:
- The index being changed.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta)
- ),
- ),
- DESCRIPTOR=_FIELDOPERATIONMETADATA,
- __module__="google.cloud.firestore.admin_v1.proto.operation_pb2",
- __doc__="""Metadata for
- [google.longrunning.Operation][google.longrunning.Operation] results
- from
- [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
-
-
- Attributes:
- start_time:
- The time this operation started.
- end_time:
- The time this operation completed. Will be unset if operation
- still in progress.
- field:
- The field resource that this operation is acting on. For
- example: ``projects/{project_id}/databases/{database_id}/colle
- ctionGroups/{collection_id}/fields/{field_path}``
- index_config_deltas:
- A list of [IndexConfigDelta][google.firestore.admin.v1.FieldOp
- erationMetadata.IndexConfigDelta], which describe the intent
- of this operation.
- state:
- The state of the operation.
- progress_documents:
- The progress, in documents, of this operation.
- progress_bytes:
- The progress, in bytes, of this operation.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.FieldOperationMetadata)
- ),
-)
-_sym_db.RegisterMessage(FieldOperationMetadata)
-_sym_db.RegisterMessage(FieldOperationMetadata.IndexConfigDelta)
-
-ExportDocumentsMetadata = _reflection.GeneratedProtocolMessageType(
- "ExportDocumentsMetadata",
- (_message.Message,),
- dict(
- DESCRIPTOR=_EXPORTDOCUMENTSMETADATA,
- __module__="google.cloud.firestore.admin_v1.proto.operation_pb2",
- __doc__="""Metadata for
- [google.longrunning.Operation][google.longrunning.Operation] results
- from
- [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
-
-
- Attributes:
- start_time:
- The time this operation started.
- end_time:
- The time this operation completed. Will be unset if operation
- still in progress.
- operation_state:
- The state of the export operation.
- progress_documents:
- The progress, in documents, of this operation.
- progress_bytes:
- The progress, in bytes, of this operation.
- collection_ids:
- Which collection ids are being exported.
- output_uri_prefix:
- Where the entities are being exported to.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ExportDocumentsMetadata)
- ),
-)
-_sym_db.RegisterMessage(ExportDocumentsMetadata)
-
-ImportDocumentsMetadata = _reflection.GeneratedProtocolMessageType(
- "ImportDocumentsMetadata",
- (_message.Message,),
- dict(
- DESCRIPTOR=_IMPORTDOCUMENTSMETADATA,
- __module__="google.cloud.firestore.admin_v1.proto.operation_pb2",
- __doc__="""Metadata for
- [google.longrunning.Operation][google.longrunning.Operation] results
- from
- [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
-
-
- Attributes:
- start_time:
- The time this operation started.
- end_time:
- The time this operation completed. Will be unset if operation
- still in progress.
- operation_state:
- The state of the import operation.
- progress_documents:
- The progress, in documents, of this operation.
- progress_bytes:
- The progress, in bytes, of this operation.
- collection_ids:
- Which collection ids are being imported.
- input_uri_prefix:
- The location of the documents being imported.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ImportDocumentsMetadata)
- ),
-)
-_sym_db.RegisterMessage(ImportDocumentsMetadata)
-
-ExportDocumentsResponse = _reflection.GeneratedProtocolMessageType(
- "ExportDocumentsResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_EXPORTDOCUMENTSRESPONSE,
- __module__="google.cloud.firestore.admin_v1.proto.operation_pb2",
- __doc__="""Returned in the
- [google.longrunning.Operation][google.longrunning.Operation] response
- field.
-
-
- Attributes:
- output_uri_prefix:
- Location of the output files. This can be used to begin an
- import into Cloud Firestore (this project or another project)
- after the operation completes successfully.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.ExportDocumentsResponse)
- ),
-)
-_sym_db.RegisterMessage(ExportDocumentsResponse)
-
-Progress = _reflection.GeneratedProtocolMessageType(
- "Progress",
- (_message.Message,),
- dict(
- DESCRIPTOR=_PROGRESS,
- __module__="google.cloud.firestore.admin_v1.proto.operation_pb2",
- __doc__="""Describes the progress of the operation. Unit of work is
- generic and must be interpreted based on where
- [Progress][google.firestore.admin.v1.Progress] is used.
-
-
- Attributes:
- estimated_work:
- The amount of work estimated.
- completed_work:
- The amount of work completed.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1.Progress)
- ),
-)
-_sym_db.RegisterMessage(Progress)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_admin_v1/proto/operation_pb2_grpc.py b/google/cloud/firestore_admin_v1/proto/operation_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_admin_v1/proto/operation_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_admin_v1/py.typed b/google/cloud/firestore_admin_v1/py.typed
new file mode 100644
index 0000000000..f7a4796eee
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-firestore-admin package uses inline types.
diff --git a/tests/unit/v1beta1/__init__.py b/google/cloud/firestore_admin_v1/services/__init__.py
similarity index 90%
rename from tests/unit/v1beta1/__init__.py
rename to google/cloud/firestore_admin_v1/services/__init__.py
index ab67290952..42ffdf2bc4 100644
--- a/tests/unit/v1beta1/__init__.py
+++ b/google/cloud/firestore_admin_v1/services/__init__.py
@@ -1,4 +1,6 @@
-# Copyright 2017 Google LLC
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,3 +13,4 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py
new file mode 100644
index 0000000000..7005212e52
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .client import FirestoreAdminClient
+from .async_client import FirestoreAdminAsyncClient
+
+__all__ = (
+ "FirestoreAdminClient",
+ "FirestoreAdminAsyncClient",
+)
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py
new file mode 100644
index 0000000000..38e6406eb5
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py
@@ -0,0 +1,946 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.api_core import operation as ga_operation # type: ignore
+from google.api_core import operation_async # type: ignore
+from google.cloud.firestore_admin_v1.services.firestore_admin import pagers
+from google.cloud.firestore_admin_v1.types import field
+from google.cloud.firestore_admin_v1.types import field as gfa_field
+from google.cloud.firestore_admin_v1.types import firestore_admin
+from google.cloud.firestore_admin_v1.types import index
+from google.cloud.firestore_admin_v1.types import index as gfa_index
+from google.cloud.firestore_admin_v1.types import operation as gfa_operation
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport
+from .client import FirestoreAdminClient
+
+
+class FirestoreAdminAsyncClient:
+ """Operations are created by service ``FirestoreAdmin``, but are
+ accessed via service ``google.longrunning.Operations``.
+ """
+
+ _client: FirestoreAdminClient
+
+ DEFAULT_ENDPOINT = FirestoreAdminClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT
+
+ field_path = staticmethod(FirestoreAdminClient.field_path)
+ parse_field_path = staticmethod(FirestoreAdminClient.parse_field_path)
+ index_path = staticmethod(FirestoreAdminClient.index_path)
+ parse_index_path = staticmethod(FirestoreAdminClient.parse_index_path)
+
+ from_service_account_file = FirestoreAdminClient.from_service_account_file
+ from_service_account_json = from_service_account_file
+
+ get_transport_class = functools.partial(
+ type(FirestoreAdminClient).get_transport_class, type(FirestoreAdminClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: credentials.Credentials = None,
+ transport: Union[str, FirestoreAdminTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the firestore admin client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.FirestoreAdminTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ self._client = FirestoreAdminClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def create_index(
+ self,
+ request: firestore_admin.CreateIndexRequest = None,
+ *,
+ parent: str = None,
+ index: gfa_index.Index = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Creates a composite index. This returns a
+ [google.longrunning.Operation][google.longrunning.Operation]
+ which may be used to track the status of the creation. The
+ metadata for the operation will be the type
+ [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
+
+ Args:
+ request (:class:`~.firestore_admin.CreateIndexRequest`):
+ The request object. The request for
+ [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
+ parent (:class:`str`):
+ Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ index (:class:`~.gfa_index.Index`):
+ Required. The composite index to
+ create.
+ This corresponds to the ``index`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`~.gfa_index.Index`: Cloud Firestore indexes
+ enable simple and complex queries against documents in a
+ database.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([parent, index]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore_admin.CreateIndexRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if index is not None:
+ request.index = index
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.create_index,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ gfa_index.Index,
+ metadata_type=gfa_operation.IndexOperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def list_indexes(
+ self,
+ request: firestore_admin.ListIndexesRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListIndexesAsyncPager:
+ r"""Lists composite indexes.
+
+ Args:
+ request (:class:`~.firestore_admin.ListIndexesRequest`):
+ The request object. The request for
+ [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
+ parent (:class:`str`):
+ Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListIndexesAsyncPager:
+ The response for
+ [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([parent]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore_admin.ListIndexesRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_indexes,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListIndexesAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def get_index(
+ self,
+ request: firestore_admin.GetIndexRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> index.Index:
+ r"""Gets a composite index.
+
+ Args:
+ request (:class:`~.firestore_admin.GetIndexRequest`):
+ The request object. The request for
+ [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex].
+ name (:class:`str`):
+ Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.index.Index:
+ Cloud Firestore indexes enable simple
+ and complex queries against documents in
+ a database.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore_admin.GetIndexRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_index,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def delete_index(
+ self,
+ request: firestore_admin.DeleteIndexRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a composite index.
+
+ Args:
+ request (:class:`~.firestore_admin.DeleteIndexRequest`):
+ The request object. The request for
+ [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex].
+ name (:class:`str`):
+ Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore_admin.DeleteIndexRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_index,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+ async def get_field(
+ self,
+ request: firestore_admin.GetFieldRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> field.Field:
+ r"""Gets the metadata and configuration for a Field.
+
+ Args:
+ request (:class:`~.firestore_admin.GetFieldRequest`):
+ The request object. The request for
+ [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField].
+ name (:class:`str`):
+ Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}``
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.field.Field:
+ Represents a single field in the
+ database.
+ Fields are grouped by their "Collection
+ Group", which represents all collections
+ in the database with the same id.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore_admin.GetFieldRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_field,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def update_field(
+ self,
+ request: firestore_admin.UpdateFieldRequest = None,
+ *,
+ field: gfa_field.Field = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Updates a field configuration. Currently, field updates apply
+ only to single field index configuration. However, calls to
+ [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]
+ should provide a field mask to avoid changing any configuration
+ that the caller isn't aware of. The field mask should be
+ specified as: ``{ paths: "index_config" }``.
+
+ This call returns a
+ [google.longrunning.Operation][google.longrunning.Operation]
+ which may be used to track the status of the field update. The
+ metadata for the operation will be the type
+ [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata].
+
+ To configure the default field settings for the database, use
+ the special ``Field`` with resource name:
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.
+
+ Args:
+ request (:class:`~.firestore_admin.UpdateFieldRequest`):
+ The request object. The request for
+ [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
+ field (:class:`~.gfa_field.Field`):
+ Required. The field to be updated.
+ This corresponds to the ``field`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`~.gfa_field.Field`: Represents a single field
+ in the database.
+
+ Fields are grouped by their "Collection Group", which
+ represents all collections in the database with the same
+ id.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([field]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore_admin.UpdateFieldRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if field is not None:
+ request.field = field
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.update_field,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("field.name", request.field.name),)
+ ),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ gfa_field.Field,
+ metadata_type=gfa_operation.FieldOperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def list_fields(
+ self,
+ request: firestore_admin.ListFieldsRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListFieldsAsyncPager:
+ r"""Lists the field configuration and metadata for this database.
+
+ Currently,
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
+ only supports listing fields that have been explicitly
+ overridden. To issue this query, call
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
+ with the filter set to ``indexConfig.usesAncestorConfig:false``.
+
+ Args:
+ request (:class:`~.firestore_admin.ListFieldsRequest`):
+ The request object. The request for
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
+ parent (:class:`str`):
+ Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListFieldsAsyncPager:
+ The response for
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([parent]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore_admin.ListFieldsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_fields,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListFieldsAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def export_documents(
+ self,
+ request: firestore_admin.ExportDocumentsRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Exports a copy of all or a subset of documents from
+ Google Cloud Firestore to another storage system, such
+ as Google Cloud Storage. Recent updates to documents may
+ not be reflected in the export. The export occurs in the
+ background and its progress can be monitored and managed
+ via the Operation resource that is created. The output
+ of an export may only be used once the associated
+ operation is done. If an export operation is cancelled
+ before completion it may leave partial data behind in
+ Google Cloud Storage.
+
+ Args:
+ request (:class:`~.firestore_admin.ExportDocumentsRequest`):
+ The request object. The request for
+ [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
+ name (:class:`str`):
+ Required. Database to export. Should be of the form:
+ ``projects/{project_id}/databases/{database_id}``.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`~.gfa_operation.ExportDocumentsResponse`:
+ Returned in the
+ [google.longrunning.Operation][google.longrunning.Operation]
+ response field.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore_admin.ExportDocumentsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.export_documents,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ gfa_operation.ExportDocumentsResponse,
+ metadata_type=gfa_operation.ExportDocumentsMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def import_documents(
+ self,
+ request: firestore_admin.ImportDocumentsRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Imports documents into Google Cloud Firestore.
+ Existing documents with the same name are overwritten.
+ The import occurs in the background and its progress can
+ be monitored and managed via the Operation resource that
+ is created. If an ImportDocuments operation is
+ cancelled, it is possible that a subset of the data has
+ already been imported to Cloud Firestore.
+
+ Args:
+ request (:class:`~.firestore_admin.ImportDocumentsRequest`):
+ The request object. The request for
+ [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
+ name (:class:`str`):
+ Required. Database to import into. Should be of the
+ form: ``projects/{project_id}/databases/{database_id}``.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`~.empty.Empty`: A generic empty message that
+ you can re-use to avoid defining duplicated empty
+ messages in your APIs. A typical example is to use it as
+ the request or the response type of an API method. For
+ instance:
+
+ ::
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ }
+
+ The JSON representation for ``Empty`` is empty JSON
+ object ``{}``.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore_admin.ImportDocumentsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.import_documents,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ empty.Empty,
+ metadata_type=gfa_operation.ImportDocumentsMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-firestore-admin",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("FirestoreAdminAsyncClient",)
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py
new file mode 100644
index 0000000000..f721cee47d
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py
@@ -0,0 +1,1089 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.api_core import operation as ga_operation # type: ignore
+from google.api_core import operation_async # type: ignore
+from google.cloud.firestore_admin_v1.services.firestore_admin import pagers
+from google.cloud.firestore_admin_v1.types import field
+from google.cloud.firestore_admin_v1.types import field as gfa_field
+from google.cloud.firestore_admin_v1.types import firestore_admin
+from google.cloud.firestore_admin_v1.types import index
+from google.cloud.firestore_admin_v1.types import index as gfa_index
+from google.cloud.firestore_admin_v1.types import operation as gfa_operation
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import FirestoreAdminGrpcTransport
+from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport
+
+
+class FirestoreAdminClientMeta(type):
+ """Metaclass for the FirestoreAdmin client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = (
+ OrderedDict()
+ ) # type: Dict[str, Type[FirestoreAdminTransport]]
+ _transport_registry["grpc"] = FirestoreAdminGrpcTransport
+ _transport_registry["grpc_asyncio"] = FirestoreAdminGrpcAsyncIOTransport
+
+ def get_transport_class(cls, label: str = None,) -> Type[FirestoreAdminTransport]:
+ """Return an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class FirestoreAdminClient(metaclass=FirestoreAdminClientMeta):
+ """Operations are created by service ``FirestoreAdmin``, but are
+ accessed via service ``google.longrunning.Operations``.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?"
+            r"(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "firestore.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ {@api.name}: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @staticmethod
+ def field_path(project: str, database: str, collection: str, field: str,) -> str:
+ """Return a fully-qualified field string."""
+ return "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}".format(
+ project=project, database=database, collection=collection, field=field,
+ )
+
+ @staticmethod
+ def parse_field_path(path: str) -> Dict[str, str]:
+ """Parse a field path into its component segments."""
+ m = re.match(
+            r"^projects/(?P<project>.+?)/databases/(?P<database>.+?)/collectionGroups/(?P<collection>.+?)/fields/(?P<field>.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def index_path(project: str, database: str, collection: str, index: str,) -> str:
+ """Return a fully-qualified index string."""
+ return "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}".format(
+ project=project, database=database, collection=collection, index=index,
+ )
+
+ @staticmethod
+ def parse_index_path(path: str) -> Dict[str, str]:
+ """Parse a index path into its component segments."""
+ m = re.match(
+            r"^projects/(?P<project>.+?)/databases/(?P<database>.+?)/collectionGroups/(?P<collection>.+?)/indexes/(?P<index>.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, FirestoreAdminTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the firestore admin client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.FirestoreAdminTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ ssl_credentials = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ import grpc # type: ignore
+
+ cert, key = client_options.client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ is_mtls = True
+ else:
+ creds = SslCredentials()
+ is_mtls = creds.is_mtls
+ ssl_credentials = creds.ssl_credentials if is_mtls else None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
+ )
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ if isinstance(transport, FirestoreAdminTransport):
+ # transport is a FirestoreAdminTransport instance.
+ if credentials or client_options.credentials_file:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its scopes directly."
+ )
+ self._transport = transport
+ else:
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport(
+ credentials=credentials,
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ ssl_channel_credentials=ssl_credentials,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
+ )
+
+ def create_index(
+ self,
+ request: firestore_admin.CreateIndexRequest = None,
+ *,
+ parent: str = None,
+ index: gfa_index.Index = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> ga_operation.Operation:
+ r"""Creates a composite index. This returns a
+ [google.longrunning.Operation][google.longrunning.Operation]
+ which may be used to track the status of the creation. The
+ metadata for the operation will be the type
+ [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
+
+ Args:
+ request (:class:`~.firestore_admin.CreateIndexRequest`):
+ The request object. The request for
+ [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
+ parent (:class:`str`):
+ Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ index (:class:`~.gfa_index.Index`):
+ Required. The composite index to
+ create.
+ This corresponds to the ``index`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.ga_operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.gfa_index.Index``: Cloud Firestore indexes
+ enable simple and complex queries against documents in a
+ database.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, index])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a firestore_admin.CreateIndexRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, firestore_admin.CreateIndexRequest):
+ request = firestore_admin.CreateIndexRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if index is not None:
+ request.index = index
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_index]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = ga_operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ gfa_index.Index,
+ metadata_type=gfa_operation.IndexOperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_indexes(
+ self,
+ request: firestore_admin.ListIndexesRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListIndexesPager:
+ r"""Lists composite indexes.
+
+ Args:
+ request (:class:`~.firestore_admin.ListIndexesRequest`):
+ The request object. The request for
+ [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
+ parent (:class:`str`):
+ Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListIndexesPager:
+ The response for
+ [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a firestore_admin.ListIndexesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, firestore_admin.ListIndexesRequest):
+ request = firestore_admin.ListIndexesRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_indexes]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListIndexesPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_index(
+ self,
+ request: firestore_admin.GetIndexRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> index.Index:
+ r"""Gets a composite index.
+
+ Args:
+ request (:class:`~.firestore_admin.GetIndexRequest`):
+ The request object. The request for
+ [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex].
+ name (:class:`str`):
+ Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.index.Index:
+ Cloud Firestore indexes enable simple
+ and complex queries against documents in
+ a database.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a firestore_admin.GetIndexRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, firestore_admin.GetIndexRequest):
+ request = firestore_admin.GetIndexRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_index]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def delete_index(
+ self,
+ request: firestore_admin.DeleteIndexRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a composite index.
+
+ Args:
+ request (:class:`~.firestore_admin.DeleteIndexRequest`):
+ The request object. The request for
+ [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex].
+ name (:class:`str`):
+ Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a firestore_admin.DeleteIndexRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, firestore_admin.DeleteIndexRequest):
+ request = firestore_admin.DeleteIndexRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_index]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+ def get_field(
+ self,
+ request: firestore_admin.GetFieldRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> field.Field:
+ r"""Gets the metadata and configuration for a Field.
+
+ Args:
+ request (:class:`~.firestore_admin.GetFieldRequest`):
+ The request object. The request for
+ [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField].
+ name (:class:`str`):
+ Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}``
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.field.Field:
+ Represents a single field in the
+ database.
+ Fields are grouped by their "Collection
+ Group", which represent all collections
+ in the database with the same id.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a firestore_admin.GetFieldRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, firestore_admin.GetFieldRequest):
+ request = firestore_admin.GetFieldRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_field]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def update_field(
+ self,
+ request: firestore_admin.UpdateFieldRequest = None,
+ *,
+ field: gfa_field.Field = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> ga_operation.Operation:
+ r"""Updates a field configuration. Currently, field updates apply
+ only to single field index configuration. However, calls to
+ [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]
+ should provide a field mask to avoid changing any configuration
+ that the caller isn't aware of. The field mask should be
+ specified as: ``{ paths: "index_config" }``.
+
+ This call returns a
+ [google.longrunning.Operation][google.longrunning.Operation]
+ which may be used to track the status of the field update. The
+ metadata for the operation will be the type
+ [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata].
+
+ To configure the default field settings for the database, use
+ the special ``Field`` with resource name:
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.
+
+ Args:
+ request (:class:`~.firestore_admin.UpdateFieldRequest`):
+ The request object. The request for
+ [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
+ field (:class:`~.gfa_field.Field`):
+ Required. The field to be updated.
+ This corresponds to the ``field`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.ga_operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.gfa_field.Field``: Represents a single field
+ in the database.
+
+ Fields are grouped by their "Collection Group", which
+ represent all collections in the database with the same
+ id.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([field])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a firestore_admin.UpdateFieldRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, firestore_admin.UpdateFieldRequest):
+ request = firestore_admin.UpdateFieldRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if field is not None:
+ request.field = field
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update_field]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("field.name", request.field.name),)
+ ),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = ga_operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ gfa_field.Field,
+ metadata_type=gfa_operation.FieldOperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_fields(
+ self,
+ request: firestore_admin.ListFieldsRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListFieldsPager:
+ r"""Lists the field configuration and metadata for this database.
+
+ Currently,
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
+ only supports listing fields that have been explicitly
+ overridden. To issue this query, call
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
+ with the filter set to ``indexConfig.usesAncestorConfig:false``.
+
+ Args:
+ request (:class:`~.firestore_admin.ListFieldsRequest`):
+ The request object. The request for
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
+ parent (:class:`str`):
+ Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListFieldsPager:
+ The response for
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a firestore_admin.ListFieldsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, firestore_admin.ListFieldsRequest):
+ request = firestore_admin.ListFieldsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_fields]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListFieldsPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def export_documents(
+ self,
+ request: firestore_admin.ExportDocumentsRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> ga_operation.Operation:
+ r"""Exports a copy of all or a subset of documents from
+ Google Cloud Firestore to another storage system, such
+ as Google Cloud Storage. Recent updates to documents may
+ not be reflected in the export. The export occurs in the
+ background and its progress can be monitored and managed
+ via the Operation resource that is created. The output
+ of an export may only be used once the associated
+ operation is done. If an export operation is cancelled
+ before completion it may leave partial data behind in
+ Google Cloud Storage.
+
+ Args:
+ request (:class:`~.firestore_admin.ExportDocumentsRequest`):
+ The request object. The request for
+ [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
+ name (:class:`str`):
+ Required. Database to export. Should be of the form:
+ ``projects/{project_id}/databases/{database_id}``.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.ga_operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+                :class:`~.gfa_operation.ExportDocumentsResponse`:
+ Returned in the
+ [google.longrunning.Operation][google.longrunning.Operation]
+ response field.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a firestore_admin.ExportDocumentsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, firestore_admin.ExportDocumentsRequest):
+ request = firestore_admin.ExportDocumentsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.export_documents]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = ga_operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ gfa_operation.ExportDocumentsResponse,
+ metadata_type=gfa_operation.ExportDocumentsMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def import_documents(
+ self,
+ request: firestore_admin.ImportDocumentsRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> ga_operation.Operation:
+ r"""Imports documents into Google Cloud Firestore.
+ Existing documents with the same name are overwritten.
+ The import occurs in the background and its progress can
+ be monitored and managed via the Operation resource that
+ is created. If an ImportDocuments operation is
+ cancelled, it is possible that a subset of the data has
+ already been imported to Cloud Firestore.
+
+ Args:
+ request (:class:`~.firestore_admin.ImportDocumentsRequest`):
+ The request object. The request for
+ [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
+ name (:class:`str`):
+ Required. Database to import into. Should be of the
+ form: ``projects/{project_id}/databases/{database_id}``.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.ga_operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+                :class:`~.empty.Empty`: A generic empty message that
+ you can re-use to avoid defining duplicated empty
+ messages in your APIs. A typical example is to use it as
+ the request or the response type of an API method. For
+ instance:
+
+ ::
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ }
+
+ The JSON representation for ``Empty`` is empty JSON
+ object ``{}``.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a firestore_admin.ImportDocumentsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, firestore_admin.ImportDocumentsRequest):
+ request = firestore_admin.ImportDocumentsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.import_documents]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = ga_operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ empty.Empty,
+ metadata_type=gfa_operation.ImportDocumentsMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-firestore-admin",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("FirestoreAdminClient",)
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py
new file mode 100644
index 0000000000..2525da38a8
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py
@@ -0,0 +1,278 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+
+from google.cloud.firestore_admin_v1.types import field
+from google.cloud.firestore_admin_v1.types import firestore_admin
+from google.cloud.firestore_admin_v1.types import index
+
+
+class ListIndexesPager:
+ """A pager for iterating through ``list_indexes`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.firestore_admin.ListIndexesResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``indexes`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListIndexes`` requests and continue to iterate
+ through the ``indexes`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.firestore_admin.ListIndexesResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., firestore_admin.ListIndexesResponse],
+ request: firestore_admin.ListIndexesRequest,
+ response: firestore_admin.ListIndexesResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.firestore_admin.ListIndexesRequest`):
+ The initial request object.
+ response (:class:`~.firestore_admin.ListIndexesResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = firestore_admin.ListIndexesRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterable[firestore_admin.ListIndexesResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterable[index.Index]:
+ for page in self.pages:
+ yield from page.indexes
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListIndexesAsyncPager:
+ """A pager for iterating through ``list_indexes`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.firestore_admin.ListIndexesResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``indexes`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``ListIndexes`` requests and continue to iterate
+ through the ``indexes`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.firestore_admin.ListIndexesResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[firestore_admin.ListIndexesResponse]],
+ request: firestore_admin.ListIndexesRequest,
+ response: firestore_admin.ListIndexesResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.firestore_admin.ListIndexesRequest`):
+ The initial request object.
+ response (:class:`~.firestore_admin.ListIndexesResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = firestore_admin.ListIndexesRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[firestore_admin.ListIndexesResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[index.Index]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.indexes:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListFieldsPager:
+ """A pager for iterating through ``list_fields`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.firestore_admin.ListFieldsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``fields`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListFields`` requests and continue to iterate
+ through the ``fields`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.firestore_admin.ListFieldsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., firestore_admin.ListFieldsResponse],
+ request: firestore_admin.ListFieldsRequest,
+ response: firestore_admin.ListFieldsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.firestore_admin.ListFieldsRequest`):
+ The initial request object.
+ response (:class:`~.firestore_admin.ListFieldsResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = firestore_admin.ListFieldsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterable[firestore_admin.ListFieldsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterable[field.Field]:
+ for page in self.pages:
+ yield from page.fields
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListFieldsAsyncPager:
+ """A pager for iterating through ``list_fields`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.firestore_admin.ListFieldsResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``fields`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``ListFields`` requests and continue to iterate
+ through the ``fields`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.firestore_admin.ListFieldsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[firestore_admin.ListFieldsResponse]],
+ request: firestore_admin.ListFieldsRequest,
+ response: firestore_admin.ListFieldsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.firestore_admin.ListFieldsRequest`):
+ The initial request object.
+ response (:class:`~.firestore_admin.ListFieldsResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = firestore_admin.ListFieldsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[firestore_admin.ListFieldsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[field.Field]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.fields:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py
new file mode 100644
index 0000000000..08dd3f989b
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import FirestoreAdminTransport
+from .grpc import FirestoreAdminGrpcTransport
+from .grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreAdminTransport]]
+_transport_registry["grpc"] = FirestoreAdminGrpcTransport
+_transport_registry["grpc_asyncio"] = FirestoreAdminGrpcAsyncIOTransport
+
+
+__all__ = (
+ "FirestoreAdminTransport",
+ "FirestoreAdminGrpcTransport",
+ "FirestoreAdminGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py
new file mode 100644
index 0000000000..ac4c4475f5
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py
@@ -0,0 +1,299 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google.auth import credentials # type: ignore
+
+from google.cloud.firestore_admin_v1.types import field
+from google.cloud.firestore_admin_v1.types import firestore_admin
+from google.cloud.firestore_admin_v1.types import index
+from google.longrunning import operations_pb2 as operations # type: ignore
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-firestore-admin",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class FirestoreAdminTransport(abc.ABC):
+ """Abstract transport class for FirestoreAdmin."""
+
+ AUTH_SCOPES = (
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "firestore.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: typing.Optional[str] = None,
+ scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+ quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ **kwargs,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ """
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Lifted into its own function so it can be stubbed out during tests.
+ self._prep_wrapped_messages(client_info)
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.create_index: gapic_v1.method.wrap_method(
+ self.create_index, default_timeout=60.0, client_info=client_info,
+ ),
+ self.list_indexes: gapic_v1.method.wrap_method(
+ self.list_indexes,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.get_index: gapic_v1.method.wrap_method(
+ self.get_index,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.delete_index: gapic_v1.method.wrap_method(
+ self.delete_index,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.get_field: gapic_v1.method.wrap_method(
+ self.get_field,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.update_field: gapic_v1.method.wrap_method(
+ self.update_field, default_timeout=60.0, client_info=client_info,
+ ),
+ self.list_fields: gapic_v1.method.wrap_method(
+ self.list_fields,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.export_documents: gapic_v1.method.wrap_method(
+ self.export_documents, default_timeout=60.0, client_info=client_info,
+ ),
+ self.import_documents: gapic_v1.method.wrap_method(
+ self.import_documents, default_timeout=60.0, client_info=client_info,
+ ),
+ }
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsClient:
+ """Return the client designed to process long-running operations."""
+ raise NotImplementedError()
+
+ @property
+ def create_index(
+ self,
+ ) -> typing.Callable[
+ [firestore_admin.CreateIndexRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def list_indexes(
+ self,
+ ) -> typing.Callable[
+ [firestore_admin.ListIndexesRequest],
+ typing.Union[
+ firestore_admin.ListIndexesResponse,
+ typing.Awaitable[firestore_admin.ListIndexesResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_index(
+ self,
+ ) -> typing.Callable[
+ [firestore_admin.GetIndexRequest],
+ typing.Union[index.Index, typing.Awaitable[index.Index]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def delete_index(
+ self,
+ ) -> typing.Callable[
+ [firestore_admin.DeleteIndexRequest],
+ typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_field(
+ self,
+ ) -> typing.Callable[
+ [firestore_admin.GetFieldRequest],
+ typing.Union[field.Field, typing.Awaitable[field.Field]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def update_field(
+ self,
+ ) -> typing.Callable[
+ [firestore_admin.UpdateFieldRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def list_fields(
+ self,
+ ) -> typing.Callable[
+ [firestore_admin.ListFieldsRequest],
+ typing.Union[
+ firestore_admin.ListFieldsResponse,
+ typing.Awaitable[firestore_admin.ListFieldsResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def export_documents(
+ self,
+ ) -> typing.Callable[
+ [firestore_admin.ExportDocumentsRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def import_documents(
+ self,
+ ) -> typing.Callable[
+ [firestore_admin.ImportDocumentsRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+
+__all__ = ("FirestoreAdminTransport",)
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py
new file mode 100644
index 0000000000..dc82e06e8d
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py
@@ -0,0 +1,530 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+
+from google.cloud.firestore_admin_v1.types import field
+from google.cloud.firestore_admin_v1.types import firestore_admin
+from google.cloud.firestore_admin_v1.types import index
+from google.longrunning import operations_pb2 as operations # type: ignore
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO
+
+
+class FirestoreAdminGrpcTransport(FirestoreAdminTransport):
+ """gRPC backend transport for FirestoreAdmin.
+
+ Operations are created by service ``FirestoreAdmin``, but are
+ accessed via service ``google.longrunning.Operations``.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _stubs: Dict[str, Callable]
+
+ def __init__(
+ self,
+ *,
+ host: str = "firestore.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ self._stubs = {} # type: Dict[str, Callable]
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "firestore.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ address (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Sanity check: Only create a new client if we do not already have one.
+ if "operations_client" not in self.__dict__:
+ self.__dict__["operations_client"] = operations_v1.OperationsClient(
+ self.grpc_channel
+ )
+
+ # Return the client from cache.
+ return self.__dict__["operations_client"]
+
+ @property
+ def create_index(
+ self,
+ ) -> Callable[[firestore_admin.CreateIndexRequest], operations.Operation]:
+ r"""Return a callable for the create index method over gRPC.
+
+ Creates a composite index. This returns a
+ [google.longrunning.Operation][google.longrunning.Operation]
+ which may be used to track the status of the creation. The
+ metadata for the operation will be the type
+ [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
+
+ Returns:
+ Callable[[~.CreateIndexRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_index" not in self._stubs:
+ self._stubs["create_index"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex",
+ request_serializer=firestore_admin.CreateIndexRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["create_index"]
+
+ @property
+ def list_indexes(
+ self,
+ ) -> Callable[
+ [firestore_admin.ListIndexesRequest], firestore_admin.ListIndexesResponse
+ ]:
+ r"""Return a callable for the list indexes method over gRPC.
+
+ Lists composite indexes.
+
+ Returns:
+ Callable[[~.ListIndexesRequest],
+ ~.ListIndexesResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_indexes" not in self._stubs:
+ self._stubs["list_indexes"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes",
+ request_serializer=firestore_admin.ListIndexesRequest.serialize,
+ response_deserializer=firestore_admin.ListIndexesResponse.deserialize,
+ )
+ return self._stubs["list_indexes"]
+
+ @property
+ def get_index(self) -> Callable[[firestore_admin.GetIndexRequest], index.Index]:
+ r"""Return a callable for the get index method over gRPC.
+
+ Gets a composite index.
+
+ Returns:
+ Callable[[~.GetIndexRequest],
+ ~.Index]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_index" not in self._stubs:
+ self._stubs["get_index"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/GetIndex",
+ request_serializer=firestore_admin.GetIndexRequest.serialize,
+ response_deserializer=index.Index.deserialize,
+ )
+ return self._stubs["get_index"]
+
+ @property
+ def delete_index(
+ self,
+ ) -> Callable[[firestore_admin.DeleteIndexRequest], empty.Empty]:
+ r"""Return a callable for the delete index method over gRPC.
+
+ Deletes a composite index.
+
+ Returns:
+ Callable[[~.DeleteIndexRequest],
+ ~.Empty]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_index" not in self._stubs:
+ self._stubs["delete_index"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex",
+ request_serializer=firestore_admin.DeleteIndexRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["delete_index"]
+
+ @property
+ def get_field(self) -> Callable[[firestore_admin.GetFieldRequest], field.Field]:
+ r"""Return a callable for the get field method over gRPC.
+
+ Gets the metadata and configuration for a Field.
+
+ Returns:
+ Callable[[~.GetFieldRequest],
+ ~.Field]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_field" not in self._stubs:
+ self._stubs["get_field"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/GetField",
+ request_serializer=firestore_admin.GetFieldRequest.serialize,
+ response_deserializer=field.Field.deserialize,
+ )
+ return self._stubs["get_field"]
+
+ @property
+ def update_field(
+ self,
+ ) -> Callable[[firestore_admin.UpdateFieldRequest], operations.Operation]:
+ r"""Return a callable for the update field method over gRPC.
+
+ Updates a field configuration. Currently, field updates apply
+ only to single field index configuration. However, calls to
+ [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]
+ should provide a field mask to avoid changing any configuration
+ that the caller isn't aware of. The field mask should be
+ specified as: ``{ paths: "index_config" }``.
+
+ This call returns a
+ [google.longrunning.Operation][google.longrunning.Operation]
+ which may be used to track the status of the field update. The
+ metadata for the operation will be the type
+ [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata].
+
+ To configure the default field settings for the database, use
+ the special ``Field`` with resource name:
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.
+
+ Returns:
+ Callable[[~.UpdateFieldRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_field" not in self._stubs:
+ self._stubs["update_field"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/UpdateField",
+ request_serializer=firestore_admin.UpdateFieldRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["update_field"]
+
+ @property
+ def list_fields(
+ self,
+ ) -> Callable[
+ [firestore_admin.ListFieldsRequest], firestore_admin.ListFieldsResponse
+ ]:
+ r"""Return a callable for the list fields method over gRPC.
+
+ Lists the field configuration and metadata for this database.
+
+ Currently,
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
+ only supports listing fields that have been explicitly
+ overridden. To issue this query, call
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
+ with the filter set to ``indexConfig.usesAncestorConfig:false``.
+
+ Returns:
+ Callable[[~.ListFieldsRequest],
+ ~.ListFieldsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_fields" not in self._stubs:
+ self._stubs["list_fields"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ListFields",
+ request_serializer=firestore_admin.ListFieldsRequest.serialize,
+ response_deserializer=firestore_admin.ListFieldsResponse.deserialize,
+ )
+ return self._stubs["list_fields"]
+
+ @property
+ def export_documents(
+ self,
+ ) -> Callable[[firestore_admin.ExportDocumentsRequest], operations.Operation]:
+ r"""Return a callable for the export documents method over gRPC.
+
+ Exports a copy of all or a subset of documents from
+ Google Cloud Firestore to another storage system, such
+ as Google Cloud Storage. Recent updates to documents may
+ not be reflected in the export. The export occurs in the
+ background and its progress can be monitored and managed
+ via the Operation resource that is created. The output
+ of an export may only be used once the associated
+ operation is done. If an export operation is cancelled
+ before completion it may leave partial data behind in
+ Google Cloud Storage.
+
+ Returns:
+ Callable[[~.ExportDocumentsRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "export_documents" not in self._stubs:
+ self._stubs["export_documents"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments",
+ request_serializer=firestore_admin.ExportDocumentsRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["export_documents"]
+
+ @property
+ def import_documents(
+ self,
+ ) -> Callable[[firestore_admin.ImportDocumentsRequest], operations.Operation]:
+ r"""Return a callable for the import documents method over gRPC.
+
+ Imports documents into Google Cloud Firestore.
+ Existing documents with the same name are overwritten.
+ The import occurs in the background and its progress can
+ be monitored and managed via the Operation resource that
+ is created. If an ImportDocuments operation is
+ cancelled, it is possible that a subset of the data has
+ already been imported to Cloud Firestore.
+
+ Returns:
+ Callable[[~.ImportDocumentsRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "import_documents" not in self._stubs:
+ self._stubs["import_documents"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments",
+ request_serializer=firestore_admin.ImportDocumentsRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["import_documents"]
+
+
+__all__ = ("FirestoreAdminGrpcTransport",)
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py
new file mode 100644
index 0000000000..30ce02fc18
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py
@@ -0,0 +1,544 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.firestore_admin_v1.types import field
+from google.cloud.firestore_admin_v1.types import firestore_admin
+from google.cloud.firestore_admin_v1.types import index
+from google.longrunning import operations_pb2 as operations # type: ignore
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO
+from .grpc import FirestoreAdminGrpcTransport
+
+
+class FirestoreAdminGrpcAsyncIOTransport(FirestoreAdminTransport):
+ """gRPC AsyncIO backend transport for FirestoreAdmin.
+
+ Operations are created by service ``FirestoreAdmin``, but are
+ accessed via service ``google.longrunning.Operations``.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "firestore.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ address (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "firestore.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ self._stubs = {}
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsAsyncClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Sanity check: Only create a new client if we do not already have one.
+ if "operations_client" not in self.__dict__:
+ self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient(
+ self.grpc_channel
+ )
+
+ # Return the client from cache.
+ return self.__dict__["operations_client"]
+
+ @property
+ def create_index(
+ self,
+ ) -> Callable[
+ [firestore_admin.CreateIndexRequest], Awaitable[operations.Operation]
+ ]:
+ r"""Return a callable for the create index method over gRPC.
+
+ Creates a composite index. This returns a
+ [google.longrunning.Operation][google.longrunning.Operation]
+ which may be used to track the status of the creation. The
+ metadata for the operation will be the type
+ [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
+
+ Returns:
+ Callable[[~.CreateIndexRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_index" not in self._stubs:
+ self._stubs["create_index"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex",
+ request_serializer=firestore_admin.CreateIndexRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["create_index"]
+
+ @property
+ def list_indexes(
+ self,
+ ) -> Callable[
+ [firestore_admin.ListIndexesRequest],
+ Awaitable[firestore_admin.ListIndexesResponse],
+ ]:
+ r"""Return a callable for the list indexes method over gRPC.
+
+ Lists composite indexes.
+
+ Returns:
+ Callable[[~.ListIndexesRequest],
+ Awaitable[~.ListIndexesResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_indexes" not in self._stubs:
+ self._stubs["list_indexes"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes",
+ request_serializer=firestore_admin.ListIndexesRequest.serialize,
+ response_deserializer=firestore_admin.ListIndexesResponse.deserialize,
+ )
+ return self._stubs["list_indexes"]
+
+ @property
+ def get_index(
+ self,
+ ) -> Callable[[firestore_admin.GetIndexRequest], Awaitable[index.Index]]:
+ r"""Return a callable for the get index method over gRPC.
+
+ Gets a composite index.
+
+ Returns:
+ Callable[[~.GetIndexRequest],
+ Awaitable[~.Index]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_index" not in self._stubs:
+ self._stubs["get_index"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/GetIndex",
+ request_serializer=firestore_admin.GetIndexRequest.serialize,
+ response_deserializer=index.Index.deserialize,
+ )
+ return self._stubs["get_index"]
+
+ @property
+ def delete_index(
+ self,
+ ) -> Callable[[firestore_admin.DeleteIndexRequest], Awaitable[empty.Empty]]:
+ r"""Return a callable for the delete index method over gRPC.
+
+ Deletes a composite index.
+
+ Returns:
+ Callable[[~.DeleteIndexRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_index" not in self._stubs:
+ self._stubs["delete_index"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex",
+ request_serializer=firestore_admin.DeleteIndexRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["delete_index"]
+
+ @property
+ def get_field(
+ self,
+ ) -> Callable[[firestore_admin.GetFieldRequest], Awaitable[field.Field]]:
+ r"""Return a callable for the get field method over gRPC.
+
+ Gets the metadata and configuration for a Field.
+
+ Returns:
+ Callable[[~.GetFieldRequest],
+ Awaitable[~.Field]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_field" not in self._stubs:
+ self._stubs["get_field"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/GetField",
+ request_serializer=firestore_admin.GetFieldRequest.serialize,
+ response_deserializer=field.Field.deserialize,
+ )
+ return self._stubs["get_field"]
+
+ @property
+ def update_field(
+ self,
+ ) -> Callable[
+ [firestore_admin.UpdateFieldRequest], Awaitable[operations.Operation]
+ ]:
+ r"""Return a callable for the update field method over gRPC.
+
+ Updates a field configuration. Currently, field updates apply
+ only to single field index configuration. However, calls to
+ [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]
+ should provide a field mask to avoid changing any configuration
+ that the caller isn't aware of. The field mask should be
+ specified as: ``{ paths: "index_config" }``.
+
+ This call returns a
+ [google.longrunning.Operation][google.longrunning.Operation]
+ which may be used to track the status of the field update. The
+ metadata for the operation will be the type
+ [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata].
+
+ To configure the default field settings for the database, use
+ the special ``Field`` with resource name:
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.
+
+ Returns:
+ Callable[[~.UpdateFieldRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_field" not in self._stubs:
+ self._stubs["update_field"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/UpdateField",
+ request_serializer=firestore_admin.UpdateFieldRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["update_field"]
+
+ @property
+ def list_fields(
+ self,
+ ) -> Callable[
+ [firestore_admin.ListFieldsRequest],
+ Awaitable[firestore_admin.ListFieldsResponse],
+ ]:
+ r"""Return a callable for the list fields method over gRPC.
+
+ Lists the field configuration and metadata for this database.
+
+ Currently,
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
+ only supports listing fields that have been explicitly
+ overridden. To issue this query, call
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
+ with the filter set to ``indexConfig.usesAncestorConfig:false``.
+
+ Returns:
+ Callable[[~.ListFieldsRequest],
+ Awaitable[~.ListFieldsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_fields" not in self._stubs:
+ self._stubs["list_fields"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ListFields",
+ request_serializer=firestore_admin.ListFieldsRequest.serialize,
+ response_deserializer=firestore_admin.ListFieldsResponse.deserialize,
+ )
+ return self._stubs["list_fields"]
+
+ @property
+ def export_documents(
+ self,
+ ) -> Callable[
+ [firestore_admin.ExportDocumentsRequest], Awaitable[operations.Operation]
+ ]:
+ r"""Return a callable for the export documents method over gRPC.
+
+ Exports a copy of all or a subset of documents from
+ Google Cloud Firestore to another storage system, such
+ as Google Cloud Storage. Recent updates to documents may
+ not be reflected in the export. The export occurs in the
+ background and its progress can be monitored and managed
+ via the Operation resource that is created. The output
+ of an export may only be used once the associated
+ operation is done. If an export operation is cancelled
+ before completion it may leave partial data behind in
+ Google Cloud Storage.
+
+ Returns:
+ Callable[[~.ExportDocumentsRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "export_documents" not in self._stubs:
+ self._stubs["export_documents"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments",
+ request_serializer=firestore_admin.ExportDocumentsRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["export_documents"]
+
+ @property
+ def import_documents(
+ self,
+ ) -> Callable[
+ [firestore_admin.ImportDocumentsRequest], Awaitable[operations.Operation]
+ ]:
+ r"""Return a callable for the import documents method over gRPC.
+
+ Imports documents into Google Cloud Firestore.
+ Existing documents with the same name are overwritten.
+ The import occurs in the background and its progress can
+ be monitored and managed via the Operation resource that
+ is created. If an ImportDocuments operation is
+ cancelled, it is possible that a subset of the data has
+ already been imported to Cloud Firestore.
+
+ Returns:
+ Callable[[~.ImportDocumentsRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "import_documents" not in self._stubs:
+ self._stubs["import_documents"] = self.grpc_channel.unary_unary(
+ "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments",
+ request_serializer=firestore_admin.ImportDocumentsRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["import_documents"]
+
+
+__all__ = ("FirestoreAdminGrpcAsyncIOTransport",)
diff --git a/google/cloud/firestore_admin_v1/types.py b/google/cloud/firestore_admin_v1/types.py
deleted file mode 100644
index ca5f241644..0000000000
--- a/google/cloud/firestore_admin_v1/types.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from __future__ import absolute_import
-import sys
-
-from google.api_core.protobuf_helpers import get_messages
-
-from google.cloud.firestore_admin_v1.proto import field_pb2
-from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2
-from google.cloud.firestore_admin_v1.proto import index_pb2
-from google.cloud.firestore_admin_v1.proto import location_pb2
-from google.cloud.firestore_admin_v1.proto import operation_pb2
-from google.longrunning import operations_pb2
-from google.protobuf import any_pb2
-from google.protobuf import empty_pb2
-from google.protobuf import field_mask_pb2
-from google.protobuf import timestamp_pb2
-from google.rpc import status_pb2
-
-
-_shared_modules = [
- operations_pb2,
- any_pb2,
- empty_pb2,
- field_mask_pb2,
- timestamp_pb2,
- status_pb2,
-]
-
-_local_modules = [
- field_pb2,
- firestore_admin_pb2,
- index_pb2,
- location_pb2,
- operation_pb2,
-]
-
-names = []
-
-for module in _shared_modules: # pragma: NO COVER
- for name, message in get_messages(module).items():
- setattr(sys.modules[__name__], name, message)
- names.append(name)
-for module in _local_modules:
- for name, message in get_messages(module).items():
- message.__module__ = "google.cloud.firestore_admin_v1.types"
- setattr(sys.modules[__name__], name, message)
- names.append(name)
-
-
-__all__ = tuple(sorted(names))
diff --git a/google/cloud/firestore_admin_v1/types/__init__.py b/google/cloud/firestore_admin_v1/types/__init__.py
new file mode 100644
index 0000000000..f5cbaa99c9
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/types/__init__.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .index import Index
+from .field import Field
+from .firestore_admin import (
+ CreateIndexRequest,
+ ListIndexesRequest,
+ ListIndexesResponse,
+ GetIndexRequest,
+ DeleteIndexRequest,
+ UpdateFieldRequest,
+ GetFieldRequest,
+ ListFieldsRequest,
+ ListFieldsResponse,
+ ExportDocumentsRequest,
+ ImportDocumentsRequest,
+)
+from .location import LocationMetadata
+from .operation import (
+ IndexOperationMetadata,
+ FieldOperationMetadata,
+ ExportDocumentsMetadata,
+ ImportDocumentsMetadata,
+ ExportDocumentsResponse,
+ Progress,
+)
+
+
+__all__ = (
+ "Index",
+ "Field",
+ "CreateIndexRequest",
+ "ListIndexesRequest",
+ "ListIndexesResponse",
+ "GetIndexRequest",
+ "DeleteIndexRequest",
+ "UpdateFieldRequest",
+ "GetFieldRequest",
+ "ListFieldsRequest",
+ "ListFieldsResponse",
+ "ExportDocumentsRequest",
+ "ImportDocumentsRequest",
+ "LocationMetadata",
+ "IndexOperationMetadata",
+ "FieldOperationMetadata",
+ "ExportDocumentsMetadata",
+ "ImportDocumentsMetadata",
+ "ExportDocumentsResponse",
+ "Progress",
+)
diff --git a/google/cloud/firestore_admin_v1/types/field.py b/google/cloud/firestore_admin_v1/types/field.py
new file mode 100644
index 0000000000..b63869b6e6
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/types/field.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.cloud.firestore_admin_v1.types import index
+
+
+__protobuf__ = proto.module(package="google.firestore.admin.v1", manifest={"Field",},)
+
+
+class Field(proto.Message):
+ r"""Represents a single field in the database.
+ Fields are grouped by their "Collection Group", which represent
+ all collections in the database with the same id.
+
+ Attributes:
+ name (str):
+ A field name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}``
+
+ A field path may be a simple field name, e.g. ``address`` or
+ a path to fields within map_value , e.g. ``address.city``,
+ or a special field path. The only valid special field is
+ ``*``, which represents any field.
+
+            Field paths may be quoted using a backtick. The only character that needs to be escaped within a quoted field path is the backtick character itself, escaped using a backslash.
+            Special characters in field paths that must be quoted include: ``*``, ``.``, the backtick character, ``[``, ``]``,
+            as well as any ascii symbolic characters.
+
+            Examples: the quoted field path containing ``address.city``
+            represents a field named ``address.city``, not the map key
+            ``city`` in the field ``address``; the quoted field path
+            containing ``*`` represents a field named ``*``,
+            not any field.
+
+ A special ``Field`` contains the default indexing settings
+ for all fields. This field's resource name is:
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``
+ Indexes defined on this ``Field`` will be applied to all
+ fields which do not have their own ``Field`` index
+ configuration.
+ index_config (~.field.Field.IndexConfig):
+ The index configuration for this field. If unset, field
+ indexing will revert to the configuration defined by the
+ ``ancestor_field``. To explicitly remove all indexes for
+ this field, specify an index config with an empty list of
+ indexes.
+ """
+
+ class IndexConfig(proto.Message):
+ r"""The index configuration for this field.
+
+ Attributes:
+ indexes (Sequence[~.index.Index]):
+ The indexes supported for this field.
+ uses_ancestor_config (bool):
+ Output only. When true, the ``Field``'s index configuration
+ is set from the configuration specified by the
+ ``ancestor_field``. When false, the ``Field``'s index
+ configuration is defined explicitly.
+ ancestor_field (str):
+ Output only. Specifies the resource name of the ``Field``
+ from which this field's index configuration is set (when
+ ``uses_ancestor_config`` is true), or from which it *would*
+ be set if this field had no index configuration (when
+ ``uses_ancestor_config`` is false).
+ reverting (bool):
+                Output only. When true, the ``Field``'s index configuration
+ is in the process of being reverted. Once complete, the
+ index config will transition to the same state as the field
+ specified by ``ancestor_field``, at which point
+ ``uses_ancestor_config`` will be ``true`` and ``reverting``
+ will be ``false``.
+ """
+
+ indexes = proto.RepeatedField(proto.MESSAGE, number=1, message=index.Index,)
+
+ uses_ancestor_config = proto.Field(proto.BOOL, number=2)
+
+ ancestor_field = proto.Field(proto.STRING, number=3)
+
+ reverting = proto.Field(proto.BOOL, number=4)
+
+ name = proto.Field(proto.STRING, number=1)
+
+ index_config = proto.Field(proto.MESSAGE, number=2, message=IndexConfig,)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_admin_v1/types/firestore_admin.py b/google/cloud/firestore_admin_v1/types/firestore_admin.py
new file mode 100644
index 0000000000..7a365edb34
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/types/firestore_admin.py
@@ -0,0 +1,277 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.cloud.firestore_admin_v1.types import field as gfa_field
+from google.cloud.firestore_admin_v1.types import index as gfa_index
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.firestore.admin.v1",
+ manifest={
+ "CreateIndexRequest",
+ "ListIndexesRequest",
+ "ListIndexesResponse",
+ "GetIndexRequest",
+ "DeleteIndexRequest",
+ "UpdateFieldRequest",
+ "GetFieldRequest",
+ "ListFieldsRequest",
+ "ListFieldsResponse",
+ "ExportDocumentsRequest",
+ "ImportDocumentsRequest",
+ },
+)
+
+
+class CreateIndexRequest(proto.Message):
+ r"""The request for
+ [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
+
+ Attributes:
+ parent (str):
+ Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ index (~.gfa_index.Index):
+ Required. The composite index to create.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ index = proto.Field(proto.MESSAGE, number=2, message=gfa_index.Index,)
+
+
+class ListIndexesRequest(proto.Message):
+ r"""The request for
+ [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
+
+ Attributes:
+ parent (str):
+ Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ filter (str):
+ The filter to apply to list results.
+ page_size (int):
+ The number of results to return.
+ page_token (str):
+ A page token, returned from a previous call to
+ [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes],
+ that may be used to get the next page of results.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ filter = proto.Field(proto.STRING, number=2)
+
+ page_size = proto.Field(proto.INT32, number=3)
+
+ page_token = proto.Field(proto.STRING, number=4)
+
+
+class ListIndexesResponse(proto.Message):
+ r"""The response for
+ [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
+
+ Attributes:
+ indexes (Sequence[~.gfa_index.Index]):
+ The requested indexes.
+ next_page_token (str):
+ A page token that may be used to request
+ another page of results. If blank, this is the
+ last page.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ indexes = proto.RepeatedField(proto.MESSAGE, number=1, message=gfa_index.Index,)
+
+ next_page_token = proto.Field(proto.STRING, number=2)
+
+
+class GetIndexRequest(proto.Message):
+ r"""The request for
+ [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex].
+
+ Attributes:
+ name (str):
+ Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
+class DeleteIndexRequest(proto.Message):
+ r"""The request for
+ [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex].
+
+ Attributes:
+ name (str):
+ Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
+class UpdateFieldRequest(proto.Message):
+ r"""The request for
+ [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
+
+ Attributes:
+ field (~.gfa_field.Field):
+ Required. The field to be updated.
+ update_mask (~.field_mask.FieldMask):
+ A mask, relative to the field. If specified, only
+ configuration specified by this field_mask will be updated
+ in the field.
+ """
+
+ field = proto.Field(proto.MESSAGE, number=1, message=gfa_field.Field,)
+
+ update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,)
+
+
+class GetFieldRequest(proto.Message):
+ r"""The request for
+ [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField].
+
+ Attributes:
+ name (str):
+ Required. A name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}``
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
+class ListFieldsRequest(proto.Message):
+ r"""The request for
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
+
+ Attributes:
+ parent (str):
+ Required. A parent name of the form
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+ filter (str):
+ The filter to apply to list results. Currently,
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
+ only supports listing fields that have been explicitly
+ overridden. To issue this query, call
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]
+ with the filter set to
+ ``indexConfig.usesAncestorConfig:false``.
+ page_size (int):
+ The number of results to return.
+ page_token (str):
+ A page token, returned from a previous call to
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields],
+ that may be used to get the next page of results.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ filter = proto.Field(proto.STRING, number=2)
+
+ page_size = proto.Field(proto.INT32, number=3)
+
+ page_token = proto.Field(proto.STRING, number=4)
+
+
+class ListFieldsResponse(proto.Message):
+ r"""The response for
+ [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
+
+ Attributes:
+ fields (Sequence[~.gfa_field.Field]):
+ The requested fields.
+ next_page_token (str):
+ A page token that may be used to request
+ another page of results. If blank, this is the
+ last page.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ fields = proto.RepeatedField(proto.MESSAGE, number=1, message=gfa_field.Field,)
+
+ next_page_token = proto.Field(proto.STRING, number=2)
+
+
+class ExportDocumentsRequest(proto.Message):
+ r"""The request for
+ [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
+
+ Attributes:
+ name (str):
+ Required. Database to export. Should be of the form:
+ ``projects/{project_id}/databases/{database_id}``.
+ collection_ids (Sequence[str]):
+ Which collection ids to export. Unspecified
+ means all collections.
+ output_uri_prefix (str):
+ The output URI. Currently only supports Google Cloud Storage
+ URIs of the form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``,
+ where ``BUCKET_NAME`` is the name of the Google Cloud
+ Storage bucket and ``NAMESPACE_PATH`` is an optional Google
+ Cloud Storage namespace path. When choosing a name, be sure
+ to consider Google Cloud Storage naming guidelines:
+ https://cloud.google.com/storage/docs/naming. If the URI is
+ a bucket (without a namespace path), a prefix will be
+ generated based on the start time.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ collection_ids = proto.RepeatedField(proto.STRING, number=2)
+
+ output_uri_prefix = proto.Field(proto.STRING, number=3)
+
+
+class ImportDocumentsRequest(proto.Message):
+ r"""The request for
+ [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
+
+ Attributes:
+ name (str):
+ Required. Database to import into. Should be of the form:
+ ``projects/{project_id}/databases/{database_id}``.
+ collection_ids (Sequence[str]):
+ Which collection ids to import. Unspecified
+ means all collections included in the import.
+ input_uri_prefix (str):
+ Location of the exported files. This must match the
+ output_uri_prefix of an ExportDocumentsResponse from an
+ export that has completed successfully. See:
+ [google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix].
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ collection_ids = proto.RepeatedField(proto.STRING, number=2)
+
+ input_uri_prefix = proto.Field(proto.STRING, number=3)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_admin_v1/types/index.py b/google/cloud/firestore_admin_v1/types/index.py
new file mode 100644
index 0000000000..3f10dfb081
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/types/index.py
@@ -0,0 +1,134 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+__protobuf__ = proto.module(package="google.firestore.admin.v1", manifest={"Index",},)
+
+
+class Index(proto.Message):
+ r"""Cloud Firestore indexes enable simple and complex queries
+ against documents in a database.
+
+ Attributes:
+ name (str):
+ Output only. A server defined name for this index. The form
+ of this name for composite indexes will be:
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}``
+ For single field indexes, this field will be empty.
+ query_scope (~.index.Index.QueryScope):
+ Indexes with a collection query scope
+ specified allow queries against a collection
+ that is the child of a specific document,
+ specified at query time, and that has the same
+ collection id.
+ Indexes with a collection group query scope
+ specified allow queries against all collections
+ descended from a specific document, specified at
+ query time, and that have the same collection id
+ as this index.
+ fields (Sequence[~.index.Index.IndexField]):
+ The fields supported by this index.
+
+ For composite indexes, this is always 2 or more fields. The
+ last field entry is always for the field path ``__name__``.
+ If, on creation, ``__name__`` was not specified as the last
+ field, it will be added automatically with the same
+ direction as that of the last field defined. If the final
+ field in a composite index is not directional, the
+ ``__name__`` will be ordered ASCENDING (unless explicitly
+ specified).
+
+ For single field indexes, this will always be exactly one
+ entry with a field path equal to the field path of the
+ associated field.
+ state (~.index.Index.State):
+ Output only. The serving state of the index.
+ """
+
+ class QueryScope(proto.Enum):
+ r"""Query Scope defines the scope at which a query is run. This is
+ specified on a StructuredQuery's ``from`` field.
+ """
+ QUERY_SCOPE_UNSPECIFIED = 0
+ COLLECTION = 1
+ COLLECTION_GROUP = 2
+
+ class State(proto.Enum):
+ r"""The state of an index. During index creation, an index will be in
+ the ``CREATING`` state. If the index is created successfully, it
+ will transition to the ``READY`` state. If the index creation
+ encounters a problem, the index will transition to the
+ ``NEEDS_REPAIR`` state.
+ """
+ STATE_UNSPECIFIED = 0
+ CREATING = 1
+ READY = 2
+ NEEDS_REPAIR = 3
+
+ class IndexField(proto.Message):
+ r"""A field in an index. The field_path describes which field is
+ indexed, the value_mode describes how the field value is indexed.
+
+ Attributes:
+ field_path (str):
+                Can be ``__name__``. For single field indexes, this must match
+ the name of the field or may be omitted.
+ order (~.index.Index.IndexField.Order):
+ Indicates that this field supports ordering
+ by the specified order or comparing using =, <,
+ <=, >, >=.
+ array_config (~.index.Index.IndexField.ArrayConfig):
+ Indicates that this field supports operations on
+ ``array_value``\ s.
+ """
+
+ class Order(proto.Enum):
+ r"""The supported orderings."""
+ ORDER_UNSPECIFIED = 0
+ ASCENDING = 1
+ DESCENDING = 2
+
+ class ArrayConfig(proto.Enum):
+ r"""The supported array value configurations."""
+ ARRAY_CONFIG_UNSPECIFIED = 0
+ CONTAINS = 1
+
+ field_path = proto.Field(proto.STRING, number=1)
+
+ order = proto.Field(
+ proto.ENUM, number=2, oneof="value_mode", enum="Index.IndexField.Order",
+ )
+
+ array_config = proto.Field(
+ proto.ENUM,
+ number=3,
+ oneof="value_mode",
+ enum="Index.IndexField.ArrayConfig",
+ )
+
+ name = proto.Field(proto.STRING, number=1)
+
+ query_scope = proto.Field(proto.ENUM, number=2, enum=QueryScope,)
+
+ fields = proto.RepeatedField(proto.MESSAGE, number=3, message=IndexField,)
+
+ state = proto.Field(proto.ENUM, number=4, enum=State,)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_admin_v1/types/location.py b/google/cloud/firestore_admin_v1/types/location.py
new file mode 100644
index 0000000000..5259f44be9
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/types/location.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.firestore.admin.v1", manifest={"LocationMetadata",},
+)
+
+
+class LocationMetadata(proto.Message):
+ r"""The metadata message for
+ [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata].
+ """
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_admin_v1/types/operation.py b/google/cloud/firestore_admin_v1/types/operation.py
new file mode 100644
index 0000000000..29e902f46c
--- /dev/null
+++ b/google/cloud/firestore_admin_v1/types/operation.py
@@ -0,0 +1,272 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.cloud.firestore_admin_v1.types import index as gfa_index
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.firestore.admin.v1",
+ manifest={
+ "OperationState",
+ "IndexOperationMetadata",
+ "FieldOperationMetadata",
+ "ExportDocumentsMetadata",
+ "ImportDocumentsMetadata",
+ "ExportDocumentsResponse",
+ "Progress",
+ },
+)
+
+
+class OperationState(proto.Enum):
+ r"""Describes the state of the operation."""
+ OPERATION_STATE_UNSPECIFIED = 0
+ INITIALIZING = 1
+ PROCESSING = 2
+ CANCELLING = 3
+ FINALIZING = 4
+ SUCCESSFUL = 5
+ FAILED = 6
+ CANCELLED = 7
+
+
+class IndexOperationMetadata(proto.Message):
+ r"""Metadata for
+ [google.longrunning.Operation][google.longrunning.Operation] results
+ from
+ [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
+
+ Attributes:
+ start_time (~.timestamp.Timestamp):
+ The time this operation started.
+ end_time (~.timestamp.Timestamp):
+ The time this operation completed. Will be
+ unset if operation still in progress.
+ index (str):
+ The index resource that this operation is acting on. For
+ example:
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+ state (~.operation.OperationState):
+ The state of the operation.
+ progress_documents (~.operation.Progress):
+ The progress, in documents, of this
+ operation.
+ progress_bytes (~.operation.Progress):
+ The progress, in bytes, of this operation.
+ """
+
+ start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
+
+ end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
+
+ index = proto.Field(proto.STRING, number=3)
+
+ state = proto.Field(proto.ENUM, number=4, enum="OperationState",)
+
+ progress_documents = proto.Field(proto.MESSAGE, number=5, message="Progress",)
+
+ progress_bytes = proto.Field(proto.MESSAGE, number=6, message="Progress",)
+
+
+class FieldOperationMetadata(proto.Message):
+ r"""Metadata for
+ [google.longrunning.Operation][google.longrunning.Operation] results
+ from
+ [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
+
+ Attributes:
+ start_time (~.timestamp.Timestamp):
+ The time this operation started.
+ end_time (~.timestamp.Timestamp):
+ The time this operation completed. Will be
+ unset if operation still in progress.
+ field (str):
+ The field resource that this operation is acting on. For
+ example:
+ ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}``
+ index_config_deltas (Sequence[~.operation.FieldOperationMetadata.IndexConfigDelta]):
+ A list of
+ [IndexConfigDelta][google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta],
+ which describe the intent of this operation.
+ state (~.operation.OperationState):
+ The state of the operation.
+ progress_documents (~.operation.Progress):
+ The progress, in documents, of this
+ operation.
+ progress_bytes (~.operation.Progress):
+ The progress, in bytes, of this operation.
+ """
+
+ class IndexConfigDelta(proto.Message):
+ r"""Information about an index configuration change.
+
+ Attributes:
+ change_type (~.operation.FieldOperationMetadata.IndexConfigDelta.ChangeType):
+ Specifies how the index is changing.
+ index (~.gfa_index.Index):
+ The index being changed.
+ """
+
+ class ChangeType(proto.Enum):
+ r"""Specifies how the index is changing."""
+ CHANGE_TYPE_UNSPECIFIED = 0
+ ADD = 1
+ REMOVE = 2
+
+ change_type = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum="FieldOperationMetadata.IndexConfigDelta.ChangeType",
+ )
+
+ index = proto.Field(proto.MESSAGE, number=2, message=gfa_index.Index,)
+
+ start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
+
+ end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
+
+ field = proto.Field(proto.STRING, number=3)
+
+ index_config_deltas = proto.RepeatedField(
+ proto.MESSAGE, number=4, message=IndexConfigDelta,
+ )
+
+ state = proto.Field(proto.ENUM, number=5, enum="OperationState",)
+
+ progress_documents = proto.Field(proto.MESSAGE, number=6, message="Progress",)
+
+ progress_bytes = proto.Field(proto.MESSAGE, number=7, message="Progress",)
+
+
+class ExportDocumentsMetadata(proto.Message):
+ r"""Metadata for
+ [google.longrunning.Operation][google.longrunning.Operation] results
+ from
+ [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
+
+ Attributes:
+ start_time (~.timestamp.Timestamp):
+ The time this operation started.
+ end_time (~.timestamp.Timestamp):
+ The time this operation completed. Will be
+ unset if operation still in progress.
+ operation_state (~.operation.OperationState):
+ The state of the export operation.
+ progress_documents (~.operation.Progress):
+ The progress, in documents, of this
+ operation.
+ progress_bytes (~.operation.Progress):
+ The progress, in bytes, of this operation.
+ collection_ids (Sequence[str]):
+ Which collection ids are being exported.
+ output_uri_prefix (str):
+ Where the entities are being exported to.
+ """
+
+ start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
+
+ end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
+
+ operation_state = proto.Field(proto.ENUM, number=3, enum="OperationState",)
+
+ progress_documents = proto.Field(proto.MESSAGE, number=4, message="Progress",)
+
+ progress_bytes = proto.Field(proto.MESSAGE, number=5, message="Progress",)
+
+ collection_ids = proto.RepeatedField(proto.STRING, number=6)
+
+ output_uri_prefix = proto.Field(proto.STRING, number=7)
+
+
+class ImportDocumentsMetadata(proto.Message):
+ r"""Metadata for
+ [google.longrunning.Operation][google.longrunning.Operation] results
+ from
+ [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
+
+ Attributes:
+ start_time (~.timestamp.Timestamp):
+ The time this operation started.
+ end_time (~.timestamp.Timestamp):
+ The time this operation completed. Will be
+ unset if operation still in progress.
+ operation_state (~.operation.OperationState):
+ The state of the import operation.
+ progress_documents (~.operation.Progress):
+ The progress, in documents, of this
+ operation.
+ progress_bytes (~.operation.Progress):
+ The progress, in bytes, of this operation.
+ collection_ids (Sequence[str]):
+ Which collection ids are being imported.
+ input_uri_prefix (str):
+ The location of the documents being imported.
+ """
+
+ start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
+
+ end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
+
+ operation_state = proto.Field(proto.ENUM, number=3, enum="OperationState",)
+
+ progress_documents = proto.Field(proto.MESSAGE, number=4, message="Progress",)
+
+ progress_bytes = proto.Field(proto.MESSAGE, number=5, message="Progress",)
+
+ collection_ids = proto.RepeatedField(proto.STRING, number=6)
+
+ input_uri_prefix = proto.Field(proto.STRING, number=7)
+
+
+class ExportDocumentsResponse(proto.Message):
+ r"""Returned in the
+ [google.longrunning.Operation][google.longrunning.Operation]
+ response field.
+
+ Attributes:
+ output_uri_prefix (str):
+ Location of the output files. This can be
+ used to begin an import into Cloud Firestore
+ (this project or another project) after the
+ operation completes successfully.
+ """
+
+ output_uri_prefix = proto.Field(proto.STRING, number=1)
+
+
+class Progress(proto.Message):
+ r"""Describes the progress of the operation. Unit of work is generic and
+ must be interpreted based on where
+ [Progress][google.firestore.admin.v1.Progress] is used.
+
+ Attributes:
+ estimated_work (int):
+ The amount of work estimated.
+ completed_work (int):
+ The amount of work completed.
+ """
+
+ estimated_work = proto.Field(proto.INT64, number=1)
+
+ completed_work = proto.Field(proto.INT64, number=2)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_v1/__init__.py b/google/cloud/firestore_v1/__init__.py
index e4af45218e..e6100331a4 100644
--- a/google/cloud/firestore_v1/__init__.py
+++ b/google/cloud/firestore_v1/__init__.py
@@ -1,4 +1,6 @@
-# Copyright 2019 Google LLC All rights reserved.
+# -*- coding: utf-8 -*-
+
+# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,12 +13,18 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
+
"""Python idiomatic client for Google Cloud Firestore."""
-from pkg_resources import get_distribution
-__version__ = get_distribution("google-cloud-firestore").version
+import pkg_resources
+
+try:
+ __version__ = pkg_resources.get_distribution("google-cloud-firestore").version
+except pkg_resources.DistributionNotFound:
+ __version__ = None
from google.cloud.firestore_v1 import types
from google.cloud.firestore_v1._helpers import GeoPoint
@@ -24,9 +32,22 @@
from google.cloud.firestore_v1._helpers import LastUpdateOption
from google.cloud.firestore_v1._helpers import ReadAfterWriteError
from google.cloud.firestore_v1._helpers import WriteOption
+from google.cloud.firestore_v1.async_batch import AsyncWriteBatch
+from google.cloud.firestore_v1.async_client import AsyncClient
+from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
+from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+from google.cloud.firestore_v1.async_query import AsyncQuery
+from google.cloud.firestore_v1.async_transaction import async_transactional
+from google.cloud.firestore_v1.async_transaction import AsyncTransaction
+from google.cloud.firestore_v1.base_document import DocumentSnapshot
from google.cloud.firestore_v1.batch import WriteBatch
from google.cloud.firestore_v1.client import Client
from google.cloud.firestore_v1.collection import CollectionReference
+from google.cloud.firestore_v1.document import DocumentReference
+from google.cloud.firestore_v1.query import CollectionGroup
+from google.cloud.firestore_v1.query import Query
+from google.cloud.firestore_v1.transaction import Transaction
+from google.cloud.firestore_v1.transaction import transactional
from google.cloud.firestore_v1.transforms import ArrayRemove
from google.cloud.firestore_v1.transforms import ArrayUnion
from google.cloud.firestore_v1.transforms import DELETE_FIELD
@@ -34,25 +55,77 @@
from google.cloud.firestore_v1.transforms import Maximum
from google.cloud.firestore_v1.transforms import Minimum
from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
-from google.cloud.firestore_v1.document import DocumentReference
-from google.cloud.firestore_v1.document import DocumentSnapshot
-from google.cloud.firestore_v1.gapic import enums
-from google.cloud.firestore_v1.query import Query
-from google.cloud.firestore_v1.transaction import Transaction
-from google.cloud.firestore_v1.transaction import transactional
from google.cloud.firestore_v1.watch import Watch
-__all__ = [
+# TODO(https://github.com/googleapis/python-firestore/issues/93): this is all on the generated surface. We require this to match
+# firestore.py. So comment out until needed on customer level for certain.
+# from .services.firestore import FirestoreClient
+# from .types.common import DocumentMask
+# from .types.common import Precondition
+# from .types.common import TransactionOptions
+# from .types.document import ArrayValue
+# from .types.document import Document
+# from .types.document import MapValue
+# from .types.document import Value
+# from .types.firestore import BatchGetDocumentsRequest
+# from .types.firestore import BatchGetDocumentsResponse
+# from .types.firestore import BatchWriteRequest
+# from .types.firestore import BatchWriteResponse
+# from .types.firestore import BeginTransactionRequest
+# from .types.firestore import BeginTransactionResponse
+# from .types.firestore import CommitRequest
+# from .types.firestore import CommitResponse
+# from .types.firestore import CreateDocumentRequest
+# from .types.firestore import DeleteDocumentRequest
+# from .types.firestore import GetDocumentRequest
+# from .types.firestore import ListCollectionIdsRequest
+# from .types.firestore import ListCollectionIdsResponse
+# from .types.firestore import ListDocumentsRequest
+# from .types.firestore import ListDocumentsResponse
+# from .types.firestore import ListenRequest
+# from .types.firestore import ListenResponse
+# from .types.firestore import PartitionQueryRequest
+# from .types.firestore import PartitionQueryResponse
+# from .types.firestore import RollbackRequest
+# from .types.firestore import RunQueryRequest
+# from .types.firestore import RunQueryResponse
+# from .types.firestore import Target
+# from .types.firestore import TargetChange
+# from .types.firestore import UpdateDocumentRequest
+# from .types.firestore import WriteRequest
+# from .types.firestore import WriteResponse
+# from .types.query import Cursor
+# from .types.query import StructuredQuery
+# from .types.write import DocumentChange
+# from .types.write import DocumentDelete
+# from .types.write import DocumentRemove
+from .types.write import DocumentTransform
+from typing import List
+
+
+# from .types.write import ExistenceFilter
+# from .types.write import Write
+# from .types.write import WriteResult
+
+__all__: List[str] = [
"__version__",
"ArrayRemove",
"ArrayUnion",
+ "AsyncClient",
+ "AsyncCollectionReference",
+ "AsyncDocumentReference",
+ "AsyncQuery",
+ "async_transactional",
+ "AsyncTransaction",
+ "AsyncWriteBatch",
"Client",
+ "CollectionGroup",
"CollectionReference",
"DELETE_FIELD",
"DocumentReference",
"DocumentSnapshot",
- "enums",
+ "DocumentTransform",
"ExistsOption",
"GeoPoint",
"Increment",
diff --git a/google/cloud/firestore_v1/_helpers.py b/google/cloud/firestore_v1/_helpers.py
index 34e7c5bbfa..89cf3b0025 100644
--- a/google/cloud/firestore_v1/_helpers.py
+++ b/google/cloud/firestore_v1/_helpers.py
@@ -16,22 +16,26 @@
import datetime
+from google.api_core.datetime_helpers import DatetimeWithNanoseconds # type: ignore
+from google.api_core import gapic_v1 # type: ignore
from google.protobuf import struct_pb2
-from google.type import latlng_pb2
-import grpc
-import six
+from google.type import latlng_pb2 # type: ignore
+import grpc # type: ignore
-from google.cloud import exceptions
-from google.cloud._helpers import _datetime_to_pb_timestamp
-from google.api_core.datetime_helpers import DatetimeWithNanoseconds
+from google.cloud import exceptions # type: ignore
+from google.cloud._helpers import _datetime_to_pb_timestamp # type: ignore
+from google.cloud.firestore_v1.types.write import DocumentTransform
from google.cloud.firestore_v1 import transforms
from google.cloud.firestore_v1 import types
from google.cloud.firestore_v1.field_path import FieldPath
from google.cloud.firestore_v1.field_path import parse_field_path
-from google.cloud.firestore_v1.gapic import enums
-from google.cloud.firestore_v1.proto import common_pb2
-from google.cloud.firestore_v1.proto import document_pb2
-from google.cloud.firestore_v1.proto import write_pb2
+from google.cloud.firestore_v1.types import common
+from google.cloud.firestore_v1.types import document
+from google.cloud.firestore_v1.types import write
+from typing import Any, Generator, List, NoReturn, Optional, Tuple, Union
+
+_EmptyDict: transforms.Sentinel
+_GRPC_ERROR_MAPPING: dict
BAD_PATH_TEMPLATE = "A path element must be a string. Received {}, which is a {}."
@@ -46,7 +50,7 @@
WRONG_APP_REFERENCE = (
"Document {!r} does not correspond to the same database " "({!r}) as the client."
)
-REQUEST_TIME_ENUM = enums.DocumentTransform.FieldTransform.ServerValue.REQUEST_TIME
+REQUEST_TIME_ENUM = DocumentTransform.FieldTransform.ServerValue.REQUEST_TIME
_GRPC_ERROR_MAPPING = {
grpc.StatusCode.ALREADY_EXISTS: exceptions.Conflict,
grpc.StatusCode.NOT_FOUND: exceptions.NotFound,
@@ -61,11 +65,11 @@ class GeoPoint(object):
longitude (float): Longitude of a point.
"""
- def __init__(self, latitude, longitude):
+ def __init__(self, latitude, longitude) -> None:
self.latitude = latitude
self.longitude = longitude
- def to_protobuf(self):
+ def to_protobuf(self) -> latlng_pb2.LatLng:
"""Convert the current object to protobuf.
Returns:
@@ -101,7 +105,7 @@ def __ne__(self, other):
return not equality_val
-def verify_path(path, is_collection):
+def verify_path(path, is_collection) -> None:
"""Verifies that a ``path`` has the correct form.
Checks that all of the elements in ``path`` are strings.
@@ -132,12 +136,12 @@ def verify_path(path, is_collection):
raise ValueError("A document must have an even number of path elements")
for element in path:
- if not isinstance(element, six.string_types):
+ if not isinstance(element, str):
msg = BAD_PATH_TEMPLATE.format(element, type(element))
raise ValueError(msg)
-def encode_value(value):
+def encode_value(value) -> types.document.Value:
"""Converts a native Python value into a Firestore protobuf ``Value``.
Args:
@@ -153,55 +157,55 @@ def encode_value(value):
TypeError: If the ``value`` is not one of the accepted types.
"""
if value is None:
- return document_pb2.Value(null_value=struct_pb2.NULL_VALUE)
+ return document.Value(null_value=struct_pb2.NULL_VALUE)
- # Must come before six.integer_types since ``bool`` is an integer subtype.
+ # Must come before int since ``bool`` is an integer subtype.
if isinstance(value, bool):
- return document_pb2.Value(boolean_value=value)
+ return document.Value(boolean_value=value)
- if isinstance(value, six.integer_types):
- return document_pb2.Value(integer_value=value)
+ if isinstance(value, int):
+ return document.Value(integer_value=value)
if isinstance(value, float):
- return document_pb2.Value(double_value=value)
+ return document.Value(double_value=value)
if isinstance(value, DatetimeWithNanoseconds):
- return document_pb2.Value(timestamp_value=value.timestamp_pb())
+ return document.Value(timestamp_value=value.timestamp_pb())
if isinstance(value, datetime.datetime):
- return document_pb2.Value(timestamp_value=_datetime_to_pb_timestamp(value))
+ return document.Value(timestamp_value=_datetime_to_pb_timestamp(value))
- if isinstance(value, six.text_type):
- return document_pb2.Value(string_value=value)
+ if isinstance(value, str):
+ return document.Value(string_value=value)
- if isinstance(value, six.binary_type):
- return document_pb2.Value(bytes_value=value)
+ if isinstance(value, bytes):
+ return document.Value(bytes_value=value)
# NOTE: We avoid doing an isinstance() check for a Document
# here to avoid import cycles.
document_path = getattr(value, "_document_path", None)
if document_path is not None:
- return document_pb2.Value(reference_value=document_path)
+ return document.Value(reference_value=document_path)
if isinstance(value, GeoPoint):
- return document_pb2.Value(geo_point_value=value.to_protobuf())
+ return document.Value(geo_point_value=value.to_protobuf())
if isinstance(value, (list, tuple, set, frozenset)):
value_list = tuple(encode_value(element) for element in value)
- value_pb = document_pb2.ArrayValue(values=value_list)
- return document_pb2.Value(array_value=value_pb)
+ value_pb = document.ArrayValue(values=value_list)
+ return document.Value(array_value=value_pb)
if isinstance(value, dict):
value_dict = encode_dict(value)
- value_pb = document_pb2.MapValue(fields=value_dict)
- return document_pb2.Value(map_value=value_pb)
+ value_pb = document.MapValue(fields=value_dict)
+ return document.Value(map_value=value_pb)
raise TypeError(
"Cannot convert to a Firestore Value", value, "Invalid type", type(value)
)
-def encode_dict(values_dict):
+def encode_dict(values_dict) -> dict:
"""Encode a dictionary into protobuf ``Value``-s.
Args:
@@ -212,10 +216,10 @@ def encode_dict(values_dict):
dictionary of string keys and ``Value`` protobufs as dictionary
values.
"""
- return {key: encode_value(value) for key, value in six.iteritems(values_dict)}
+ return {key: encode_value(value) for key, value in values_dict.items()}
-def reference_value_to_document(reference_value, client):
+def reference_value_to_document(reference_value, client) -> Any:
"""Convert a reference value string to a document.
Args:
@@ -249,7 +253,9 @@ def reference_value_to_document(reference_value, client):
return document
-def decode_value(value, client):
+def decode_value(
+ value, client
+) -> Union[None, bool, int, float, list, datetime.datetime, str, bytes, dict, GeoPoint]:
"""Converts a Firestore protobuf ``Value`` to a native Python value.
Args:
@@ -267,7 +273,7 @@ def decode_value(value, client):
NotImplementedError: If the ``value_type`` is ``reference_value``.
ValueError: If the ``value_type`` is unknown.
"""
- value_type = value.WhichOneof("value_type")
+ value_type = value._pb.WhichOneof("value_type")
if value_type == "null_value":
return None
@@ -278,7 +284,7 @@ def decode_value(value, client):
elif value_type == "double_value":
return value.double_value
elif value_type == "timestamp_value":
- return DatetimeWithNanoseconds.from_timestamp_pb(value.timestamp_value)
+ return DatetimeWithNanoseconds.from_timestamp_pb(value._pb.timestamp_value)
elif value_type == "string_value":
return value.string_value
elif value_type == "bytes_value":
@@ -295,7 +301,7 @@ def decode_value(value, client):
raise ValueError("Unknown ``value_type``", value_type)
-def decode_dict(value_fields, client):
+def decode_dict(value_fields, client) -> dict:
"""Converts a protobuf map of Firestore ``Value``-s.
Args:
@@ -309,17 +315,15 @@ def decode_dict(value_fields, client):
str, bytes, dict, ~google.cloud.Firestore.GeoPoint]]: A dictionary
of native Python values converted from the ``value_fields``.
"""
- return {
- key: decode_value(value, client) for key, value in six.iteritems(value_fields)
- }
+ return {key: decode_value(value, client) for key, value in value_fields.items()}
-def get_doc_id(document_pb, expected_prefix):
+def get_doc_id(document_pb, expected_prefix) -> str:
"""Parse a document ID from a document protobuf.
Args:
document_pb (google.cloud.proto.firestore.v1.\
- document_pb2.Document): A protobuf for a document that
+ document.Document): A protobuf for a document that
was created in a ``CreateDocument`` RPC.
expected_prefix (str): The expected collection prefix for the
fully-qualified document name.
@@ -345,12 +349,14 @@ def get_doc_id(document_pb, expected_prefix):
_EmptyDict = transforms.Sentinel("Marker for an empty dict value")
-def extract_fields(document_data, prefix_path, expand_dots=False):
+def extract_fields(
+ document_data, prefix_path: FieldPath, expand_dots=False
+) -> Generator[Tuple[Any, Any], Any, None]:
"""Do depth-first walk of tree, yielding field_path, value"""
if not document_data:
yield prefix_path, _EmptyDict
else:
- for key, value in sorted(six.iteritems(document_data)):
+ for key, value in sorted(document_data.items()):
if expand_dots:
sub_key = FieldPath.from_string(key)
@@ -366,7 +372,7 @@ def extract_fields(document_data, prefix_path, expand_dots=False):
yield field_path, value
-def set_field_value(document_data, field_path, value):
+def set_field_value(document_data, field_path, value) -> None:
"""Set a value into a document for a field_path"""
current = document_data
for element in field_path.parts[:-1]:
@@ -376,7 +382,7 @@ def set_field_value(document_data, field_path, value):
current[field_path.parts[-1]] = value
-def get_field_value(document_data, field_path):
+def get_field_value(document_data, field_path) -> Any:
if not field_path.parts:
raise ValueError("Empty path")
@@ -397,7 +403,7 @@ class DocumentExtractor(object):
a document.
"""
- def __init__(self, document_data):
+ def __init__(self, document_data) -> None:
self.document_data = document_data
self.field_paths = []
self.deleted_fields = []
@@ -443,7 +449,9 @@ def __init__(self, document_data):
self.field_paths.append(field_path)
set_field_value(self.set_fields, field_path, value)
- def _get_document_iterator(self, prefix_path):
+ def _get_document_iterator(
+ self, prefix_path: FieldPath
+ ) -> Generator[Tuple[Any, Any], Any, None]:
return extract_fields(self.document_data, prefix_path)
@property
@@ -468,18 +476,20 @@ def transform_paths(self):
+ list(self.minimums)
)
- def _get_update_mask(self, allow_empty_mask=False):
+ def _get_update_mask(self, allow_empty_mask=False) -> None:
return None
- def get_update_pb(self, document_path, exists=None, allow_empty_mask=False):
+ def get_update_pb(
+ self, document_path, exists=None, allow_empty_mask=False
+ ) -> types.write.Write:
if exists is not None:
- current_document = common_pb2.Precondition(exists=exists)
+ current_document = common.Precondition(exists=exists)
else:
current_document = None
- update_pb = write_pb2.Write(
- update=document_pb2.Document(
+ update_pb = write.Write(
+ update=document.Document(
name=document_path, fields=encode_dict(self.set_fields)
),
update_mask=self._get_update_mask(allow_empty_mask),
@@ -488,16 +498,18 @@ def get_update_pb(self, document_path, exists=None, allow_empty_mask=False):
return update_pb
- def get_transform_pb(self, document_path, exists=None):
+ def get_field_transform_pbs(
+ self, document_path
+ ) -> List[types.write.DocumentTransform.FieldTransform]:
def make_array_value(values):
value_list = [encode_value(element) for element in values]
- return document_pb2.ArrayValue(values=value_list)
+ return document.ArrayValue(values=value_list)
path_field_transforms = (
[
(
path,
- write_pb2.DocumentTransform.FieldTransform(
+ write.DocumentTransform.FieldTransform(
field_path=path.to_api_repr(),
set_to_server_value=REQUEST_TIME_ENUM,
),
@@ -507,7 +519,7 @@ def make_array_value(values):
+ [
(
path,
- write_pb2.DocumentTransform.FieldTransform(
+ write.DocumentTransform.FieldTransform(
field_path=path.to_api_repr(),
remove_all_from_array=make_array_value(values),
),
@@ -517,7 +529,7 @@ def make_array_value(values):
+ [
(
path,
- write_pb2.DocumentTransform.FieldTransform(
+ write.DocumentTransform.FieldTransform(
field_path=path.to_api_repr(),
append_missing_elements=make_array_value(values),
),
@@ -527,7 +539,7 @@ def make_array_value(values):
+ [
(
path,
- write_pb2.DocumentTransform.FieldTransform(
+ write.DocumentTransform.FieldTransform(
field_path=path.to_api_repr(), increment=encode_value(value)
),
)
@@ -536,7 +548,7 @@ def make_array_value(values):
+ [
(
path,
- write_pb2.DocumentTransform.FieldTransform(
+ write.DocumentTransform.FieldTransform(
field_path=path.to_api_repr(), maximum=encode_value(value)
),
)
@@ -545,30 +557,31 @@ def make_array_value(values):
+ [
(
path,
- write_pb2.DocumentTransform.FieldTransform(
+ write.DocumentTransform.FieldTransform(
field_path=path.to_api_repr(), minimum=encode_value(value)
),
)
for path, value in self.minimums.items()
]
)
- field_transforms = [
- transform for path, transform in sorted(path_field_transforms)
- ]
- transform_pb = write_pb2.Write(
- transform=write_pb2.DocumentTransform(
+ return [transform for path, transform in sorted(path_field_transforms)]
+
+ def get_transform_pb(self, document_path, exists=None) -> types.write.Write:
+ field_transforms = self.get_field_transform_pbs(document_path)
+ transform_pb = write.Write(
+ transform=write.DocumentTransform(
document=document_path, field_transforms=field_transforms
)
)
if exists is not None:
- transform_pb.current_document.CopyFrom(
- common_pb2.Precondition(exists=exists)
+ transform_pb._pb.current_document.CopyFrom(
+ common.Precondition(exists=exists)._pb
)
return transform_pb
-def pbs_for_create(document_path, document_data):
+def pbs_for_create(document_path, document_data) -> List[types.write.Write]:
"""Make ``Write`` protobufs for ``create()`` methods.
Args:
@@ -585,22 +598,16 @@ def pbs_for_create(document_path, document_data):
if extractor.deleted_fields:
raise ValueError("Cannot apply DELETE_FIELD in a create request.")
- write_pbs = []
-
- # Conformance tests require skipping the 'update_pb' if the document
- # contains only transforms.
- if extractor.empty_document or extractor.set_fields:
- write_pbs.append(extractor.get_update_pb(document_path, exists=False))
+ create_pb = extractor.get_update_pb(document_path, exists=False)
if extractor.has_transforms:
- exists = None if write_pbs else False
- transform_pb = extractor.get_transform_pb(document_path, exists)
- write_pbs.append(transform_pb)
+ field_transform_pbs = extractor.get_field_transform_pbs(document_path)
+ create_pb.update_transforms.extend(field_transform_pbs)
- return write_pbs
+ return [create_pb]
-def pbs_for_set_no_merge(document_path, document_data):
+def pbs_for_set_no_merge(document_path, document_data) -> List[types.write.Write]:
"""Make ``Write`` protobufs for ``set()`` methods.
Args:
@@ -620,55 +627,39 @@ def pbs_for_set_no_merge(document_path, document_data):
"specifying 'merge=True' or 'merge=[field_paths]'."
)
- # Conformance tests require send the 'update_pb' even if the document
- # contains only transforms.
- write_pbs = [extractor.get_update_pb(document_path)]
+ set_pb = extractor.get_update_pb(document_path)
if extractor.has_transforms:
- transform_pb = extractor.get_transform_pb(document_path)
- write_pbs.append(transform_pb)
+ field_transform_pbs = extractor.get_field_transform_pbs(document_path)
+ set_pb.update_transforms.extend(field_transform_pbs)
- return write_pbs
+ return [set_pb]
class DocumentExtractorForMerge(DocumentExtractor):
""" Break document data up into actual data and transforms.
"""
- def __init__(self, document_data):
+ def __init__(self, document_data) -> None:
super(DocumentExtractorForMerge, self).__init__(document_data)
self.data_merge = []
self.transform_merge = []
self.merge = []
- @property
- def has_updates(self):
- # for whatever reason, the conformance tests want to see the parent
- # of nested transform paths in the update mask
- # (see set-st-merge-nonleaf-alone.textproto)
- update_paths = set(self.data_merge)
-
- for transform_path in self.transform_paths:
- if len(transform_path.parts) > 1:
- parent_fp = FieldPath(*transform_path.parts[:-1])
- update_paths.add(parent_fp)
-
- return bool(update_paths)
-
- def _apply_merge_all(self):
+ def _apply_merge_all(self) -> None:
self.data_merge = sorted(self.field_paths + self.deleted_fields)
# TODO: other transforms
self.transform_merge = self.transform_paths
self.merge = sorted(self.data_merge + self.transform_paths)
- def _construct_merge_paths(self, merge):
+ def _construct_merge_paths(self, merge) -> Generator[Any, Any, None]:
for merge_field in merge:
if isinstance(merge_field, FieldPath):
yield merge_field
else:
yield FieldPath(*parse_field_path(merge_field))
- def _normalize_merge_paths(self, merge):
+ def _normalize_merge_paths(self, merge) -> list:
merge_paths = sorted(self._construct_merge_paths(merge))
# Raise if any merge path is a parent of another. Leverage sorting
@@ -688,7 +679,7 @@ def _normalize_merge_paths(self, merge):
return merge_paths
- def _apply_merge_paths(self, merge):
+ def _apply_merge_paths(self, merge) -> None:
if self.empty_document:
raise ValueError("Cannot merge specific fields with empty document.")
@@ -752,13 +743,15 @@ def _apply_merge_paths(self, merge):
if path in merged_transform_paths
}
- def apply_merge(self, merge):
+ def apply_merge(self, merge) -> None:
if merge is True: # merge all fields
self._apply_merge_all()
else:
self._apply_merge_paths(merge)
- def _get_update_mask(self, allow_empty_mask=False):
+ def _get_update_mask(
+ self, allow_empty_mask=False
+ ) -> Optional[types.common.DocumentMask]:
# Mask uses dotted / quoted paths.
mask_paths = [
field_path.to_api_repr()
@@ -766,11 +759,12 @@ def _get_update_mask(self, allow_empty_mask=False):
if field_path not in self.transform_merge
]
- if mask_paths or allow_empty_mask:
- return common_pb2.DocumentMask(field_paths=mask_paths)
+ return common.DocumentMask(field_paths=mask_paths)
-def pbs_for_set_with_merge(document_path, document_data, merge):
+def pbs_for_set_with_merge(
+ document_path, document_data, merge
+) -> List[types.write.Write]:
"""Make ``Write`` protobufs for ``set()`` methods.
Args:
@@ -787,27 +781,20 @@ def pbs_for_set_with_merge(document_path, document_data, merge):
extractor = DocumentExtractorForMerge(document_data)
extractor.apply_merge(merge)
- merge_empty = not document_data
-
- write_pbs = []
-
- if extractor.has_updates or merge_empty:
- write_pbs.append(
- extractor.get_update_pb(document_path, allow_empty_mask=merge_empty)
- )
+ set_pb = extractor.get_update_pb(document_path)
if extractor.transform_paths:
- transform_pb = extractor.get_transform_pb(document_path)
- write_pbs.append(transform_pb)
+ field_transform_pbs = extractor.get_field_transform_pbs(document_path)
+ set_pb.update_transforms.extend(field_transform_pbs)
- return write_pbs
+ return [set_pb]
class DocumentExtractorForUpdate(DocumentExtractor):
""" Break document data up into actual data and transforms.
"""
- def __init__(self, document_data):
+ def __init__(self, document_data) -> None:
super(DocumentExtractorForUpdate, self).__init__(document_data)
self.top_level_paths = sorted(
[FieldPath.from_string(key) for key in document_data]
@@ -828,19 +815,21 @@ def __init__(self, document_data):
"Cannot update with nest delete: {}".format(field_path)
)
- def _get_document_iterator(self, prefix_path):
+ def _get_document_iterator(
+ self, prefix_path: FieldPath
+ ) -> Generator[Tuple[Any, Any], Any, None]:
return extract_fields(self.document_data, prefix_path, expand_dots=True)
- def _get_update_mask(self, allow_empty_mask=False):
+ def _get_update_mask(self, allow_empty_mask=False) -> types.common.DocumentMask:
mask_paths = []
for field_path in self.top_level_paths:
if field_path not in self.transform_paths:
mask_paths.append(field_path.to_api_repr())
- return common_pb2.DocumentMask(field_paths=mask_paths)
+ return common.DocumentMask(field_paths=mask_paths)
-def pbs_for_update(document_path, field_updates, option):
+def pbs_for_update(document_path, field_updates, option) -> List[types.write.Write]:
"""Make ``Write`` protobufs for ``update()`` methods.
Args:
@@ -863,25 +852,17 @@ def pbs_for_update(document_path, field_updates, option):
if option is None: # Default is to use ``exists=True``.
option = ExistsOption(exists=True)
- write_pbs = []
-
- if extractor.field_paths or extractor.deleted_fields:
- update_pb = extractor.get_update_pb(document_path)
- option.modify_write(update_pb)
- write_pbs.append(update_pb)
+ update_pb = extractor.get_update_pb(document_path)
+ option.modify_write(update_pb)
if extractor.has_transforms:
- transform_pb = extractor.get_transform_pb(document_path)
- if not write_pbs:
- # NOTE: set the write option on the ``transform_pb`` only if there
- # is no ``update_pb``
- option.modify_write(transform_pb)
- write_pbs.append(transform_pb)
+ field_transform_pbs = extractor.get_field_transform_pbs(document_path)
+ update_pb.update_transforms.extend(field_transform_pbs)
- return write_pbs
+ return [update_pb]
-def pb_for_delete(document_path, option):
+def pb_for_delete(document_path, option) -> types.write.Write:
"""Make a ``Write`` protobuf for ``delete()`` methods.
Args:
@@ -894,7 +875,7 @@ def pb_for_delete(document_path, option):
google.cloud.firestore_v1.types.Write: A
``Write`` protobuf instance for the ``delete()``.
"""
- write_pb = write_pb2.Write(delete=document_path)
+ write_pb = write.Write(delete=document_path)
if option is not None:
option.modify_write(write_pb)
@@ -908,7 +889,7 @@ class ReadAfterWriteError(Exception):
"""
-def get_transaction_id(transaction, read_operation=True):
+def get_transaction_id(transaction, read_operation=True) -> Union[bytes, None]:
"""Get the transaction ID from a ``Transaction`` object.
Args:
@@ -938,7 +919,7 @@ def get_transaction_id(transaction, read_operation=True):
return transaction.id
-def metadata_with_prefix(prefix, **kw):
+def metadata_with_prefix(prefix: str, **kw) -> List[Tuple[str, str]]:
"""Create RPC metadata containing a prefix.
Args:
@@ -953,13 +934,13 @@ def metadata_with_prefix(prefix, **kw):
class WriteOption(object):
"""Option used to assert a condition on a write operation."""
- def modify_write(self, write_pb, no_create_msg=None):
+ def modify_write(self, write, no_create_msg=None) -> NoReturn:
"""Modify a ``Write`` protobuf based on the state of this write option.
This is a virtual method intended to be implemented by subclasses.
Args:
- write_pb (google.cloud.firestore_v1.types.Write): A
+ write (google.cloud.firestore_v1.types.Write): A
``Write`` protobuf instance to be modified with a precondition
determined by the state of this option.
no_create_msg (Optional[str]): A message to use to indicate that
@@ -985,7 +966,7 @@ class LastUpdateOption(WriteOption):
as part of a "write result" protobuf or directly.
"""
- def __init__(self, last_update_time):
+ def __init__(self, last_update_time) -> None:
self._last_update_time = last_update_time
def __eq__(self, other):
@@ -993,7 +974,7 @@ def __eq__(self, other):
return NotImplemented
return self._last_update_time == other._last_update_time
- def modify_write(self, write_pb, **unused_kwargs):
+ def modify_write(self, write, **unused_kwargs) -> None:
"""Modify a ``Write`` protobuf based on the state of this write option.
The ``last_update_time`` is added to ``write_pb`` as an "update time"
@@ -1008,7 +989,7 @@ def modify_write(self, write_pb, **unused_kwargs):
other subclasses that are unused here.
"""
current_doc = types.Precondition(update_time=self._last_update_time)
- write_pb.current_document.CopyFrom(current_doc)
+ write._pb.current_document.CopyFrom(current_doc._pb)
class ExistsOption(WriteOption):
@@ -1022,7 +1003,7 @@ class ExistsOption(WriteOption):
should already exist.
"""
- def __init__(self, exists):
+ def __init__(self, exists) -> None:
self._exists = exists
def __eq__(self, other):
@@ -1030,7 +1011,7 @@ def __eq__(self, other):
return NotImplemented
return self._exists == other._exists
- def modify_write(self, write_pb, **unused_kwargs):
+ def modify_write(self, write, **unused_kwargs) -> None:
"""Modify a ``Write`` protobuf based on the state of this write option.
If:
@@ -1039,11 +1020,24 @@ def modify_write(self, write_pb, **unused_kwargs):
* ``exists=False``, adds a precondition that requires non-existence
Args:
- write_pb (google.cloud.firestore_v1.types.Write): A
+ write (google.cloud.firestore_v1.types.Write): A
``Write`` protobuf instance to be modified with a precondition
determined by the state of this option.
unused_kwargs (Dict[str, Any]): Keyword arguments accepted by
other subclasses that are unused here.
"""
current_doc = types.Precondition(exists=self._exists)
- write_pb.current_document.CopyFrom(current_doc)
+ write._pb.current_document.CopyFrom(current_doc._pb)
+
+
+def make_retry_timeout_kwargs(retry, timeout) -> dict:
+ """Helper for API methods which take optional 'retry' / 'timeout' args."""
+ kwargs = {}
+
+ if retry is not gapic_v1.method.DEFAULT:
+ kwargs["retry"] = retry
+
+ if timeout is not None:
+ kwargs["timeout"] = timeout
+
+ return kwargs
diff --git a/google/cloud/firestore_v1/async_batch.py b/google/cloud/firestore_v1/async_batch.py
new file mode 100644
index 0000000000..8c13102d90
--- /dev/null
+++ b/google/cloud/firestore_v1/async_batch.py
@@ -0,0 +1,73 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for batch requests to the Google Cloud Firestore API."""
+
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+
+from google.cloud.firestore_v1.base_batch import BaseWriteBatch
+
+
+class AsyncWriteBatch(BaseWriteBatch):
+ """Accumulate write operations to be sent in a batch.
+
+ This has the same set of methods for write operations that
+ :class:`~google.cloud.firestore_v1.async_document.AsyncDocumentReference` does,
+ e.g. :meth:`~google.cloud.firestore_v1.async_document.AsyncDocumentReference.create`.
+
+ Args:
+ client (:class:`~google.cloud.firestore_v1.async_client.AsyncClient`):
+ The client that created this batch.
+ """
+
+ def __init__(self, client) -> None:
+ super(AsyncWriteBatch, self).__init__(client=client)
+
+ async def commit(
+ self, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None,
+ ) -> list:
+ """Commit the changes accumulated in this batch.
+
+ Args:
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Returns:
+ List[:class:`google.cloud.proto.firestore.v1.write.WriteResult`, ...]:
+ The write results corresponding to the changes committed, returned
+ in the same order as the changes were applied to this batch. A
+ write result contains an ``update_time`` field.
+ """
+ request, kwargs = self._prep_commit(retry, timeout)
+
+ commit_response = await self._client._firestore_api.commit(
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
+ )
+
+ self._write_pbs = []
+ self.write_results = results = list(commit_response.write_results)
+ self.commit_time = commit_response.commit_time
+
+ return results
+
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ if exc_type is None:
+ await self.commit()
diff --git a/google/cloud/firestore_v1/async_client.py b/google/cloud/firestore_v1/async_client.py
new file mode 100644
index 0000000000..512025f242
--- /dev/null
+++ b/google/cloud/firestore_v1/async_client.py
@@ -0,0 +1,316 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Google Cloud Firestore API.
+
+This is the base from which all interactions with the API occur.
+
+In the hierarchy of API concepts
+
+* a :class:`~google.cloud.firestore_v1.client.Client` owns a
+ :class:`~google.cloud.firestore_v1.async_collection.AsyncCollectionReference`
+* a :class:`~google.cloud.firestore_v1.client.Client` owns a
+ :class:`~google.cloud.firestore_v1.async_document.AsyncDocumentReference`
+"""
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+
+from google.cloud.firestore_v1.base_client import (
+ BaseClient,
+ DEFAULT_DATABASE,
+ _CLIENT_INFO,
+ _parse_batch_get, # type: ignore
+ _path_helper,
+)
+
+from google.cloud.firestore_v1.async_query import AsyncCollectionGroup
+from google.cloud.firestore_v1.async_batch import AsyncWriteBatch
+from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
+from google.cloud.firestore_v1.async_document import (
+ AsyncDocumentReference,
+ DocumentSnapshot,
+)
+from google.cloud.firestore_v1.async_transaction import AsyncTransaction
+from google.cloud.firestore_v1.services.firestore import (
+ async_client as firestore_client,
+)
+from google.cloud.firestore_v1.services.firestore.transports import (
+ grpc_asyncio as firestore_grpc_transport,
+)
+from typing import Any, AsyncGenerator, Iterable, Tuple
+
+
+class AsyncClient(BaseClient):
+ """Client for interacting with Google Cloud Firestore API.
+
+ .. note::
+
+ Since the Cloud Firestore API requires the gRPC transport, no
+ ``_http`` argument is accepted by this class.
+
+ Args:
+ project (Optional[str]): The project which the client acts on behalf
+ of. If not passed, falls back to the default inferred
+ from the environment.
+ credentials (Optional[~google.auth.credentials.Credentials]): The
+ OAuth2 Credentials to use for this client. If not passed, falls
+ back to the default inferred from the environment.
+ database (Optional[str]): The database name that the client targets.
+ For now, :attr:`DEFAULT_DATABASE` (the default value) is the
+ only valid database.
+ client_info (Optional[google.api_core.gapic_v1.client_info.ClientInfo]):
+ The client info used to send a user-agent string along with API
+ requests. If ``None``, then default info will be used. Generally,
+ you only need to set this if you're developing your own library
+ or partner tool.
+ client_options (Union[dict, google.api_core.client_options.ClientOptions]):
+ Client options used to set user options on the client. API Endpoint
+ should be set through client_options.
+ """
+
+ def __init__(
+ self,
+ project=None,
+ credentials=None,
+ database=DEFAULT_DATABASE,
+ client_info=_CLIENT_INFO,
+ client_options=None,
+ ) -> None:
+ super(AsyncClient, self).__init__(
+ project=project,
+ credentials=credentials,
+ database=database,
+ client_info=client_info,
+ client_options=client_options,
+ )
+
+ @property
+ def _firestore_api(self):
+ """Lazy-loading getter GAPIC Firestore API.
+ Returns:
+ :class:`~google.cloud.gapic.firestore.v1`.async_firestore_client.FirestoreAsyncClient:
+ The GAPIC client with the credentials of the current client.
+ """
+ return self._firestore_api_helper(
+ firestore_grpc_transport.FirestoreGrpcAsyncIOTransport,
+ firestore_client.FirestoreAsyncClient,
+ firestore_client,
+ )
+
+ @property
+ def _target(self):
+ """Return the target (where the API is).
+ Eg. "firestore.googleapis.com"
+
+ Returns:
+ str: The location of the API.
+ """
+ return self._target_helper(firestore_client.FirestoreAsyncClient)
+
+ def collection(self, *collection_path: Tuple[str]) -> AsyncCollectionReference:
+ """Get a reference to a collection.
+
+ For a top-level collection:
+
+ .. code-block:: python
+
+ >>> client.collection('top')
+
+ For a sub-collection:
+
+ .. code-block:: python
+
+ >>> client.collection('mydocs/doc/subcol')
+ >>> # is the same as
+ >>> client.collection('mydocs', 'doc', 'subcol')
+
+ Sub-collections can be nested deeper in a similar fashion.
+
+ Args:
+ collection_path (Tuple[str, ...]): Can either be
+
+ * A single ``/``-delimited path to a collection
+ * A tuple of collection path segments
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.async_collection.AsyncCollectionReference`:
+ A reference to a collection in the Firestore database.
+ """
+ return AsyncCollectionReference(*_path_helper(collection_path), client=self)
+
+ def collection_group(self, collection_id: str) -> AsyncCollectionGroup:
+ """
+ Creates and returns a new AsyncQuery that includes all documents in the
+ database that are contained in a collection or subcollection with the
+ given collection_id.
+
+ .. code-block:: python
+
+ >>> query = client.collection_group('mygroup')
+
+ Args:
+ collection_id (str): Identifies the collections to query over.
+
+ Every collection or subcollection with this ID as the last segment of its
+ path will be included. Cannot contain a slash.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.async_query.AsyncCollectionGroup`:
+ The created AsyncQuery.
+ """
+ return AsyncCollectionGroup(self._get_collection_reference(collection_id))
+
+ def document(self, *document_path: Tuple[str]) -> AsyncDocumentReference:
+ """Get a reference to a document in a collection.
+
+ For a top-level document:
+
+ .. code-block:: python
+
+ >>> client.document('collek/shun')
+ >>> # is the same as
+ >>> client.document('collek', 'shun')
+
+ For a document in a sub-collection:
+
+ .. code-block:: python
+
+ >>> client.document('mydocs/doc/subcol/child')
+ >>> # is the same as
+ >>> client.document('mydocs', 'doc', 'subcol', 'child')
+
+ Documents in sub-collections can be nested deeper in a similar fashion.
+
+ Args:
+ document_path (Tuple[str, ...]): Can either be
+
+ * A single ``/``-delimited path to a document
+ * A tuple of document path segments
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.async_document.AsyncDocumentReference`:
+ A reference to a document in a collection.
+ """
+ return AsyncDocumentReference(
+ *self._document_path_helper(*document_path), client=self
+ )
+
+ async def get_all(
+ self,
+ references: list,
+ field_paths: Iterable[str] = None,
+ transaction=None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> AsyncGenerator[DocumentSnapshot, Any]:
+ """Retrieve a batch of documents.
+
+ .. note::
+
+ Documents returned by this method are not guaranteed to be
+ returned in the same order that they are given in ``references``.
+
+ .. note::
+
+ If multiple ``references`` refer to the same document, the server
+ will only return one result.
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ references (List[.AsyncDocumentReference, ...]): Iterable of document
+ references to be retrieved.
+ field_paths (Optional[Iterable[str, ...]]): An iterable of field
+ paths (``.``-delimited list of field names) to use as a
+ projection of document fields in the returned results. If
+ no value is provided, all fields will be returned.
+ transaction (Optional[:class:`~google.cloud.firestore_v1.async_transaction.AsyncTransaction`]):
+ An existing transaction that these ``references`` will be
+ retrieved in.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Yields:
+ .DocumentSnapshot: The next document snapshot that fulfills the
+ query, or :data:`None` if the document does not exist.
+ """
+ request, reference_map, kwargs = self._prep_get_all(
+ references, field_paths, transaction, retry, timeout
+ )
+
+ response_iterator = await self._firestore_api.batch_get_documents(
+ request=request, metadata=self._rpc_metadata, **kwargs,
+ )
+
+ async for get_doc_response in response_iterator:
+ yield _parse_batch_get(get_doc_response, reference_map, self)
+
+ async def collections(
+ self, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None,
+ ) -> AsyncGenerator[AsyncCollectionReference, Any]:
+ """List top-level collections of the client's database.
+
+ Args:
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Returns:
+ Sequence[:class:`~google.cloud.firestore_v1.async_collection.AsyncCollectionReference`]:
+ iterator of subcollections of the current document.
+ """
+ request, kwargs = self._prep_collections(retry, timeout)
+ iterator = await self._firestore_api.list_collection_ids(
+ request=request, metadata=self._rpc_metadata, **kwargs,
+ )
+
+ async for collection_id in iterator:
+ yield self.collection(collection_id)
+
+ def batch(self) -> AsyncWriteBatch:
+ """Get a batch instance from this client.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.async_batch.AsyncWriteBatch`:
+ A "write" batch to be used for accumulating document changes and
+ sending the changes all at once.
+ """
+ return AsyncWriteBatch(self)
+
+ def transaction(self, **kwargs) -> AsyncTransaction:
+ """Get a transaction that uses this client.
+
+ See :class:`~google.cloud.firestore_v1.async_transaction.AsyncTransaction` for
+ more information on transactions and the constructor arguments.
+
+ Args:
+ kwargs (Dict[str, Any]): The keyword arguments (other than
+ ``client``) to pass along to the
+ :class:`~google.cloud.firestore_v1.async_transaction.AsyncTransaction`
+ constructor.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.async_transaction.AsyncTransaction`:
+ A transaction attached to this client.
+ """
+ return AsyncTransaction(self, **kwargs)
diff --git a/google/cloud/firestore_v1/async_collection.py b/google/cloud/firestore_v1/async_collection.py
new file mode 100644
index 0000000000..e3842f03e9
--- /dev/null
+++ b/google/cloud/firestore_v1/async_collection.py
@@ -0,0 +1,216 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing collections for the Google Cloud Firestore API."""
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+
+from google.cloud.firestore_v1.base_collection import (
+ BaseCollectionReference,
+ _item_to_document_ref,
+)
+from google.cloud.firestore_v1 import (
+ async_query,
+ async_document,
+)
+
+from google.cloud.firestore_v1.document import DocumentReference
+
+from typing import AsyncIterator
+from typing import Any, AsyncGenerator, Tuple
+
+# Types needed only for Type Hints
+from google.cloud.firestore_v1.transaction import Transaction
+
+
+class AsyncCollectionReference(BaseCollectionReference):
+ """A reference to a collection in a Firestore database.
+
+ The collection may already exist or this class can facilitate creation
+ of documents within the collection.
+
+ Args:
+ path (Tuple[str, ...]): The components in the collection path.
+ This is a series of strings representing each collection and
+ sub-collection ID, as well as the document IDs for any documents
+ that contain a sub-collection.
+ kwargs (dict): The keyword arguments for the constructor. The only
+ supported keyword is ``client`` and it must be a
+ :class:`~google.cloud.firestore_v1.client.Client` if provided. It
+ represents the client that created this collection reference.
+
+ Raises:
+ ValueError: if
+
+ * the ``path`` is empty
+ * there are an even number of elements
+ * a collection ID in ``path`` is not a string
+ * a document ID in ``path`` is not a string
+ TypeError: If a keyword other than ``client`` is used.
+ """
+
+ def __init__(self, *path, **kwargs) -> None:
+ super(AsyncCollectionReference, self).__init__(*path, **kwargs)
+
+ def _query(self) -> async_query.AsyncQuery:
+ """Query factory.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`
+ """
+ return async_query.AsyncQuery(self)
+
+ async def add(
+ self,
+ document_data: dict,
+ document_id: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Tuple[Any, Any]:
+ """Create a document in the Firestore database with the provided data.
+
+ Args:
+ document_data (dict): Property names and values to use for
+ creating the document.
+ document_id (Optional[str]): The document identifier within the
+ current collection. If not provided, an ID will be
+ automatically assigned by the server (the assigned ID will be
+ a random 20 character string composed of digits,
+ uppercase and lowercase letters).
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Returns:
+ Tuple[:class:`google.protobuf.timestamp_pb2.Timestamp`, \
+ :class:`~google.cloud.firestore_v1.async_document.AsyncDocumentReference`]:
+ Pair of
+
+ * The ``update_time`` when the document was created/overwritten.
+ * A document reference for the created document.
+
+ Raises:
+ ~google.cloud.exceptions.Conflict: If ``document_id`` is provided
+ and the document already exists.
+ """
+ document_ref, kwargs = self._prep_add(
+ document_data, document_id, retry, timeout,
+ )
+ write_result = await document_ref.create(document_data, **kwargs)
+ return write_result.update_time, document_ref
+
+ async def list_documents(
+ self,
+ page_size: int = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> AsyncGenerator[DocumentReference, None]:
+ """List all subdocuments of the current collection.
+
+ Args:
+ page_size (Optional[int]): The maximum number of documents
+ in each page of results from this request. Non-positive values
+ are ignored. Defaults to a sensible value set by the API.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Returns:
+ Sequence[:class:`~google.cloud.firestore_v1.collection.DocumentReference`]:
+ iterator of subdocuments of the current collection. If the
+ collection does not exist at the time of `snapshot`, the
+ iterator will be empty
+ """
+ request, kwargs = self._prep_list_documents(page_size, retry, timeout)
+
+ iterator = await self._client._firestore_api.list_documents(
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
+ )
+ async for i in iterator:
+ yield _item_to_document_ref(self, i)
+
+ async def get(
+ self,
+ transaction: Transaction = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> list:
+ """Read the documents in this collection.
+
+ This sends a ``RunQuery`` RPC and returns a list of documents
+ returned in the stream of ``RunQueryResponse`` messages.
+
+ Args:
+ transaction
+ (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
+ An existing transaction that this query will run in.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Returns:
+ list: The documents in this collection that match the query.
+ """
+ query, kwargs = self._prep_get_or_stream(retry, timeout)
+
+ return await query.get(transaction=transaction, **kwargs)
+
+ async def stream(
+ self,
+ transaction: Transaction = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> AsyncIterator[async_document.DocumentSnapshot]:
+ """Read the documents in this collection.
+
+ This sends a ``RunQuery`` RPC and then returns an iterator which
+ consumes each document returned in the stream of ``RunQueryResponse``
+ messages.
+
+ .. note::
+
+ The underlying stream of responses will time out after
+ the ``max_rpc_timeout_millis`` value set in the GAPIC
+ client configuration for the ``RunQuery`` API. Snapshots
+ not consumed from the iterator before that point will be lost.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.\
+ Transaction`]):
+ An existing transaction that the query will run in.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Yields:
+ :class:`~google.cloud.firestore_v1.document.DocumentSnapshot`:
+ The next document that fulfills the query.
+ """
+ query, kwargs = self._prep_get_or_stream(retry, timeout)
+
+ async for d in query.stream(transaction=transaction, **kwargs):
+ yield d # pytype: disable=name-error
diff --git a/google/cloud/firestore_v1/async_document.py b/google/cloud/firestore_v1/async_document.py
new file mode 100644
index 0000000000..11dec64b0e
--- /dev/null
+++ b/google/cloud/firestore_v1/async_document.py
@@ -0,0 +1,413 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing documents for the Google Cloud Firestore API."""
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+
+from google.cloud.firestore_v1.base_document import (
+ BaseDocumentReference,
+ DocumentSnapshot,
+ _first_write_result,
+)
+
+from google.api_core import exceptions # type: ignore
+from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1.types import write
+from google.protobuf import timestamp_pb2
+from typing import Any, AsyncGenerator, Coroutine, Iterable, Union
+
+
+class AsyncDocumentReference(BaseDocumentReference):
+ """A reference to a document in a Firestore database.
+
+ The document may already exist or can be created by this class.
+
+ Args:
+ path (Tuple[str, ...]): The components in the document path.
+ This is a series of strings representing each collection and
+ sub-collection ID, as well as the document IDs for any documents
+ that contain a sub-collection (as well as the base document).
+ kwargs (dict): The keyword arguments for the constructor. The only
+ supported keyword is ``client`` and it must be a
+ :class:`~google.cloud.firestore_v1.client.Client`. It represents
+ the client that created this document reference.
+
+ Raises:
+ ValueError: if
+
+ * the ``path`` is empty
+ * there are an even number of elements
+ * a collection ID in ``path`` is not a string
+ * a document ID in ``path`` is not a string
+ TypeError: If a keyword other than ``client`` is used.
+ """
+
+ def __init__(self, *path, **kwargs) -> None:
+ super(AsyncDocumentReference, self).__init__(*path, **kwargs)
+
+ async def create(
+ self,
+ document_data: dict,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> write.WriteResult:
+ """Create the current document in the Firestore database.
+
+ Args:
+ document_data (dict): Property names and values to use for
+ creating a document.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.types.WriteResult`:
+ The write result corresponding to the committed document.
+ A write result contains an ``update_time`` field.
+
+ Raises:
+ :class:`~google.cloud.exceptions.Conflict`:
+ If the document already exists.
+ """
+ batch, kwargs = self._prep_create(document_data, retry, timeout)
+ write_results = await batch.commit(**kwargs)
+ return _first_write_result(write_results)
+
+ async def set(
+ self,
+ document_data: dict,
+ merge: bool = False,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> write.WriteResult:
+ """Replace the current document in the Firestore database.
+
+ A write ``option`` can be specified to indicate preconditions of
+ the "set" operation. If no ``option`` is specified and this document
+ doesn't exist yet, this method will create it.
+
+ Overwrites all content for the document with the fields in
+ ``document_data``. This method performs almost the same functionality
+ as :meth:`create`. The only difference is that this method doesn't
+ make any requirements on the existence of the document (unless
+ ``option`` is used), whereas :meth:`create` will fail if the
+ document already exists.
+
+ Args:
+ document_data (dict): Property names and values to use for
+ replacing a document.
+ merge (Optional[bool] or Optional[List]):
+ If True, apply merging instead of overwriting the state
+ of the document.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.types.WriteResult`:
+ The write result corresponding to the committed document. A write
+ result contains an ``update_time`` field.
+ """
+ batch, kwargs = self._prep_set(document_data, merge, retry, timeout)
+ write_results = await batch.commit(**kwargs)
+ return _first_write_result(write_results)
+
+ async def update(
+ self,
+ field_updates: dict,
+ option: _helpers.WriteOption = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> write.WriteResult:
+ """Update an existing document in the Firestore database.
+
+ By default, this method verifies that the document exists on the
+ server before making updates. A write ``option`` can be specified to
+ override these preconditions.
+
+ Each key in ``field_updates`` can either be a field name or a
+ **field path** (For more information on **field paths**, see
+ :meth:`~google.cloud.firestore_v1.client.Client.field_path`.) To
+ illustrate this, consider a document with
+
+ .. code-block:: python
+
+ >>> snapshot = await document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ },
+ 'other': True,
+ }
+
+ stored on the server. If the field name is used in the update:
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'foo': {
+ ... 'quux': 800,
+ ... },
+ ... }
+ >>> await document.update(field_updates)
+
+ then all of ``foo`` will be overwritten on the server and the new
+ value will be
+
+ .. code-block:: python
+
+ >>> snapshot = await document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'quux': 800,
+ },
+ 'other': True,
+ }
+
+ On the other hand, if a ``.``-delimited **field path** is used in the
+ update:
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'foo.quux': 800,
+ ... }
+ >>> await document.update(field_updates)
+
+ then only ``foo.quux`` will be updated on the server and the
+ field ``foo.bar`` will remain intact:
+
+ .. code-block:: python
+
+ >>> snapshot = await document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ 'quux': 800,
+ },
+ 'other': True,
+ }
+
+ .. warning::
+
+ A **field path** can only be used as a top-level key in
+ ``field_updates``.
+
+ To delete / remove a field from an existing document, use the
+ :attr:`~google.cloud.firestore_v1.transforms.DELETE_FIELD` sentinel.
+ So with the example above, sending
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'other': firestore.DELETE_FIELD,
+ ... }
+ >>> await document.update(field_updates)
+
+ would update the value on the server to:
+
+ .. code-block:: python
+
+ >>> snapshot = await document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ },
+ }
+
+ To set a field to the current time on the server when the
+ update is received, use the
+ :attr:`~google.cloud.firestore_v1.transforms.SERVER_TIMESTAMP`
+ sentinel.
+ Sending
+
+ .. code-block:: python
+
+ >>> field_updates = {
+ ... 'foo.now': firestore.SERVER_TIMESTAMP,
+ ... }
+ >>> await document.update(field_updates)
+
+ would update the value on the server to:
+
+ .. code-block:: python
+
+ >>> snapshot = await document.get()
+ >>> snapshot.to_dict()
+ {
+ 'foo': {
+ 'bar': 'baz',
+ 'now': datetime.datetime(2012, ...),
+ },
+ 'other': True,
+ }
+
+ Args:
+ field_updates (dict): Field names or paths to update and values
+ to update with.
+ option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
+ A write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.types.WriteResult`:
+ The write result corresponding to the updated document. A write
+ result contains an ``update_time`` field.
+
+ Raises:
+ ~google.cloud.exceptions.NotFound: If the document does not exist.
+ """
+ batch, kwargs = self._prep_update(field_updates, option, retry, timeout)
+ write_results = await batch.commit(**kwargs)
+ return _first_write_result(write_results)
+
+ async def delete(
+ self,
+ option: _helpers.WriteOption = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> timestamp_pb2.Timestamp:
+ """Delete the current document in the Firestore database.
+
+ Args:
+ option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
+ A write option to make assertions / preconditions on the server
+ state of the document before applying changes.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Returns:
+ :class:`google.protobuf.timestamp_pb2.Timestamp`:
+ The time that the delete request was received by the server.
+ If the document did not exist when the delete was sent (i.e.
+ nothing was deleted), this method will still succeed and will
+ still return the time that the request was received by the server.
+ """
+ request, kwargs = self._prep_delete(option, retry, timeout)
+
+ commit_response = await self._client._firestore_api.commit(
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
+ )
+
+ return commit_response.commit_time
+
+ async def get(
+ self,
+ field_paths: Iterable[str] = None,
+ transaction=None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Union[DocumentSnapshot, Coroutine[Any, Any, DocumentSnapshot]]:
+ """Retrieve a snapshot of the current document.
+
+ See :meth:`~google.cloud.firestore_v1.base_client.BaseClient.field_path` for
+ more information on **field paths**.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ field_paths (Optional[Iterable[str, ...]]): An iterable of field
+ paths (``.``-delimited list of field names) to use as a
+ projection of document fields in the returned results. If
+ no value is provided, all fields will be returned.
+ transaction (Optional[:class:`~google.cloud.firestore_v1.async_transaction.AsyncTransaction`]):
+ An existing transaction that this reference
+ will be retrieved in.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.base_document.DocumentSnapshot`:
+ A snapshot of the current document. If the document does not
+ exist at the time of the snapshot is taken, the snapshot's
+ :attr:`reference`, :attr:`data`, :attr:`update_time`, and
+ :attr:`create_time` attributes will all be ``None`` and
+ its :attr:`exists` attribute will be ``False``.
+ """
+ request, kwargs = self._prep_get(field_paths, transaction, retry, timeout)
+
+ firestore_api = self._client._firestore_api
+ try:
+ document_pb = await firestore_api.get_document(
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
+ )
+ except exceptions.NotFound:
+ data = None
+ exists = False
+ create_time = None
+ update_time = None
+ else:
+ data = _helpers.decode_dict(document_pb.fields, self._client)
+ exists = True
+ create_time = document_pb.create_time
+ update_time = document_pb.update_time
+
+ return DocumentSnapshot(
+ reference=self,
+ data=data,
+ exists=exists,
+ read_time=None, # No server read_time available
+ create_time=create_time,
+ update_time=update_time,
+ )
+
+ async def collections(
+ self,
+ page_size: int = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> AsyncGenerator:
+ """List subcollections of the current document.
+
+ Args:
+ page_size (Optional[int]): The maximum number of collections
+ in each page of results from this request. Non-positive values
+ are ignored. Defaults to a sensible value set by the API.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Returns:
+ Sequence[:class:`~google.cloud.firestore_v1.async_collection.AsyncCollectionReference`]:
+ iterator of subcollections of the current document. If the
+ document does not exist at the time of `snapshot`, the
+ iterator will be empty.
+ """
+ request, kwargs = self._prep_collections(page_size, retry, timeout)
+
+ iterator = await self._client._firestore_api.list_collection_ids(
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
+ )
+
+ async for collection_id in iterator:
+ yield self.collection(collection_id)
diff --git a/google/cloud/firestore_v1/async_query.py b/google/cloud/firestore_v1/async_query.py
new file mode 100644
index 0000000000..f772194e85
--- /dev/null
+++ b/google/cloud/firestore_v1/async_query.py
@@ -0,0 +1,302 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing queries for the Google Cloud Firestore API.
+
+A :class:`~google.cloud.firestore_v1.query.Query` can be created directly from
+a :class:`~google.cloud.firestore_v1.collection.Collection` and that can be
+a more common way to create a query than direct usage of the constructor.
+"""
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+
+from google.cloud.firestore_v1.base_query import (
+ BaseCollectionGroup,
+ BaseQuery,
+ QueryPartition,
+ _query_response_to_snapshot,
+ _collection_group_query_response_to_snapshot,
+ _enum_from_direction,
+)
+
+from google.cloud.firestore_v1 import async_document
+from typing import AsyncGenerator
+
+# Types needed only for Type Hints
+from google.cloud.firestore_v1.transaction import Transaction
+
+
+class AsyncQuery(BaseQuery):
+ """Represents a query to the Firestore API.
+
+ Instances of this class are considered immutable: all methods that
+ would modify an instance instead return a new instance.
+
+ Args:
+ parent (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
+ The collection that this query applies to.
+ projection (Optional[:class:`google.cloud.proto.firestore.v1.\
+ query.StructuredQuery.Projection`]):
+ A projection of document fields to limit the query results to.
+ field_filters (Optional[Tuple[:class:`google.cloud.proto.firestore.v1.\
+ query.StructuredQuery.FieldFilter`, ...]]):
+ The filters to be applied in the query.
+ orders (Optional[Tuple[:class:`google.cloud.proto.firestore.v1.\
+ query.StructuredQuery.Order`, ...]]):
+ The "order by" entries to use in the query.
+ limit (Optional[int]):
+ The maximum number of documents the query is allowed to return.
+ offset (Optional[int]):
+ The number of results to skip.
+ start_at (Optional[Tuple[dict, bool]]):
+ Two-tuple of :
+
+ * a mapping of fields. Any field that is present in this mapping
+ must also be present in ``orders``
+ * an ``after`` flag
+
+ The fields and the flag combine to form a cursor used as
+ a starting point in a query result set. If the ``after``
+ flag is :data:`True`, the results will start just after any
+ documents which have fields matching the cursor, otherwise
+ any matching documents will be included in the result set.
+ When the query is formed, the document values
+ will be used in the order given by ``orders``.
+ end_at (Optional[Tuple[dict, bool]]):
+ Two-tuple of:
+
+ * a mapping of fields. Any field that is present in this mapping
+ must also be present in ``orders``
+ * a ``before`` flag
+
+ The fields and the flag combine to form a cursor used as
+ an ending point in a query result set. If the ``before``
+ flag is :data:`True`, the results will end just before any
+ documents which have fields matching the cursor, otherwise
+ any matching documents will be included in the result set.
+ When the query is formed, the document values
+ will be used in the order given by ``orders``.
+ all_descendants (Optional[bool]):
+ When false, selects only collections that are immediate children
+ of the `parent` specified in the containing `RunQueryRequest`.
+ When true, selects all descendant collections.
+ """
+
+ def __init__(
+ self,
+ parent,
+ projection=None,
+ field_filters=(),
+ orders=(),
+ limit=None,
+ limit_to_last=False,
+ offset=None,
+ start_at=None,
+ end_at=None,
+ all_descendants=False,
+ ) -> None:
+ super(AsyncQuery, self).__init__(
+ parent=parent,
+ projection=projection,
+ field_filters=field_filters,
+ orders=orders,
+ limit=limit,
+ limit_to_last=limit_to_last,
+ offset=offset,
+ start_at=start_at,
+ end_at=end_at,
+ all_descendants=all_descendants,
+ )
+
+ async def get(
+ self,
+ transaction: Transaction = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> list:
+ """Read the documents in the collection that match this query.
+
+ This sends a ``RunQuery`` RPC and returns a list of documents
+ returned in the stream of ``RunQueryResponse`` messages.
+
+ Args:
+ transaction
+ (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
+ An existing transaction that this query will run in.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Returns:
+ list: The documents in the collection that match this query.
+ """
+ is_limited_to_last = self._limit_to_last
+
+ if self._limit_to_last:
+ # In order to fetch up to `self._limit` results from the end of the
+ # query flip the defined ordering on the query to start from the
+ # end, retrieving up to `self._limit` results from the backend.
+ for order in self._orders:
+ order.direction = _enum_from_direction(
+ self.DESCENDING
+ if order.direction == self.ASCENDING
+ else self.ASCENDING
+ )
+ self._limit_to_last = False
+
+ result = self.stream(transaction=transaction, retry=retry, timeout=timeout)
+ result = [d async for d in result]
+ if is_limited_to_last:
+ result = list(reversed(result))
+
+ return result
+
+ async def stream(
+ self,
+ transaction=None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> AsyncGenerator[async_document.DocumentSnapshot, None]:
+ """Read the documents in the collection that match this query.
+
+ This sends a ``RunQuery`` RPC and then returns an iterator which
+ consumes each document returned in the stream of ``RunQueryResponse``
+ messages.
+
+ .. note::
+
+ The underlying stream of responses will time out after
+ the ``max_rpc_timeout_millis`` value set in the GAPIC
+ client configuration for the ``RunQuery`` API. Snapshots
+ not consumed from the iterator before that point will be lost.
+
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+
+ Args:
+ transaction
+ (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
+ An existing transaction that this query will run in.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Yields:
+ :class:`~google.cloud.firestore_v1.async_document.DocumentSnapshot`:
+ The next document that fulfills the query.
+ """
+ request, expected_prefix, kwargs = self._prep_stream(
+ transaction, retry, timeout,
+ )
+
+ response_iterator = await self._client._firestore_api.run_query(
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
+ )
+
+ async for response in response_iterator:
+ if self._all_descendants:
+ snapshot = _collection_group_query_response_to_snapshot(
+ response, self._parent
+ )
+ else:
+ snapshot = _query_response_to_snapshot(
+ response, self._parent, expected_prefix
+ )
+ if snapshot is not None:
+ yield snapshot
+
+
+class AsyncCollectionGroup(AsyncQuery, BaseCollectionGroup):
+ """Represents a Collection Group in the Firestore API.
+
+ This is a specialization of :class:`.AsyncQuery` that includes all documents in the
+ database that are contained in a collection or subcollection of the given
+ parent.
+
+ Args:
+ parent (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
+ The collection that this query applies to.
+ """
+
+ def __init__(
+ self,
+ parent,
+ projection=None,
+ field_filters=(),
+ orders=(),
+ limit=None,
+ limit_to_last=False,
+ offset=None,
+ start_at=None,
+ end_at=None,
+ all_descendants=True,
+ ) -> None:
+ super(AsyncCollectionGroup, self).__init__(
+ parent=parent,
+ projection=projection,
+ field_filters=field_filters,
+ orders=orders,
+ limit=limit,
+ limit_to_last=limit_to_last,
+ offset=offset,
+ start_at=start_at,
+ end_at=end_at,
+ all_descendants=all_descendants,
+ )
+
+ @staticmethod
+ def _get_query_class():
+ return AsyncQuery
+
+ async def get_partitions(
+ self,
+ partition_count,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> AsyncGenerator[QueryPartition, None]:
+ """Partition a query for parallelization.
+
+ Partitions a query by returning partition cursors that can be used to run the
+ query in parallel. The returned partition cursors are split points that can be
+ used as starting/end points for the query results.
+
+ Args:
+ partition_count (int): The desired maximum number of partition points. The
+ number must be strictly positive. The actual number of partitions
+ returned may be fewer.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+ """
+ request, kwargs = self._prep_get_partitions(partition_count, retry, timeout)
+ pager = await self._client._firestore_api.partition_query(
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
+ )
+
+ start_at = None
+ async for cursor_pb in pager:
+ cursor = self._client.document(cursor_pb.values[0].reference_value)
+ yield QueryPartition(self, start_at, cursor)
+ start_at = cursor
+
+ yield QueryPartition(self, start_at, None)
diff --git a/google/cloud/firestore_v1/async_transaction.py b/google/cloud/firestore_v1/async_transaction.py
new file mode 100644
index 0000000000..aae40b4682
--- /dev/null
+++ b/google/cloud/firestore_v1/async_transaction.py
@@ -0,0 +1,414 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for applying Google Cloud Firestore changes in a transaction."""
+
+
+import asyncio
+import random
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+
+from google.cloud.firestore_v1.base_transaction import (
+ _BaseTransactional,
+ BaseTransaction,
+ MAX_ATTEMPTS,
+ _CANT_BEGIN,
+ _CANT_ROLLBACK,
+ _CANT_COMMIT,
+ _WRITE_READ_ONLY,
+ _INITIAL_SLEEP,
+ _MAX_SLEEP,
+ _MULTIPLIER,
+ _EXCEED_ATTEMPTS_TEMPLATE,
+)
+
+from google.api_core import exceptions # type: ignore
+from google.cloud.firestore_v1 import async_batch
+from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1 import types
+
+from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+from google.cloud.firestore_v1.async_document import DocumentSnapshot
+from google.cloud.firestore_v1.async_query import AsyncQuery
+from typing import Any, AsyncGenerator, Callable, Coroutine
+
+# Types needed only for Type Hints
+from google.cloud.firestore_v1.client import Client
+
+
+class AsyncTransaction(async_batch.AsyncWriteBatch, BaseTransaction):
+ """Accumulate read-and-write operations to be sent in a transaction.
+
+ Args:
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ The client that created this transaction.
+ max_attempts (Optional[int]): The maximum number of attempts for
+ the transaction (i.e. allowing retries). Defaults to
+ :attr:`~google.cloud.firestore_v1.transaction.MAX_ATTEMPTS`.
+ read_only (Optional[bool]): Flag indicating if the transaction
+ should be read-only or should allow writes. Defaults to
+ :data:`False`.
+ """
+
+ def __init__(self, client, max_attempts=MAX_ATTEMPTS, read_only=False) -> None:
+ super(AsyncTransaction, self).__init__(client)
+ BaseTransaction.__init__(self, max_attempts, read_only)
+
+ def _add_write_pbs(self, write_pbs: list) -> None:
+ """Add `Write`` protobufs to this transaction.
+
+ Args:
+ write_pbs (List[google.cloud.proto.firestore.v1.\
+ write.Write]): A list of write protobufs to be added.
+
+ Raises:
+ ValueError: If this transaction is read-only.
+ """
+ if self._read_only:
+ raise ValueError(_WRITE_READ_ONLY)
+
+ super(AsyncTransaction, self)._add_write_pbs(write_pbs)
+
+ async def _begin(self, retry_id: bytes = None) -> None:
+ """Begin the transaction.
+
+ Args:
+ retry_id (Optional[bytes]): Transaction ID of a transaction to be
+ retried.
+
+ Raises:
+ ValueError: If the current transaction has already begun.
+ """
+ if self.in_progress:
+ msg = _CANT_BEGIN.format(self._id)
+ raise ValueError(msg)
+
+ transaction_response = await self._client._firestore_api.begin_transaction(
+ request={
+ "database": self._client._database_string,
+ "options": self._options_protobuf(retry_id),
+ },
+ metadata=self._client._rpc_metadata,
+ )
+ self._id = transaction_response.transaction
+
+ async def _rollback(self) -> None:
+ """Roll back the transaction.
+
+ Raises:
+ ValueError: If no transaction is in progress.
+ """
+ if not self.in_progress:
+ raise ValueError(_CANT_ROLLBACK)
+
+ try:
+ # NOTE: The response is just ``google.protobuf.Empty``.
+ await self._client._firestore_api.rollback(
+ request={
+ "database": self._client._database_string,
+ "transaction": self._id,
+ },
+ metadata=self._client._rpc_metadata,
+ )
+ finally:
+ self._clean_up()
+
+ async def _commit(self) -> list:
+ """Transactionally commit the changes accumulated.
+
+ Returns:
+ List[:class:`google.cloud.proto.firestore.v1.write.WriteResult`, ...]:
+ The write results corresponding to the changes committed, returned
+ in the same order as the changes were applied to this transaction.
+ A write result contains an ``update_time`` field.
+
+ Raises:
+ ValueError: If no transaction is in progress.
+ """
+ if not self.in_progress:
+ raise ValueError(_CANT_COMMIT)
+
+ commit_response = await _commit_with_retry(
+ self._client, self._write_pbs, self._id
+ )
+
+ self._clean_up()
+ return list(commit_response.write_results)
+
+ async def get_all(
+ self,
+ references: list,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> AsyncGenerator[DocumentSnapshot, Any]:
+ """Retrieves multiple documents from Firestore.
+
+ Args:
+ references (List[.AsyncDocumentReference, ...]): Iterable of document
+ references to be retrieved.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Yields:
+ .DocumentSnapshot: The next document snapshot that fulfills the
+ query, or :data:`None` if the document does not exist.
+ """
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+ return await self._client.get_all(references, transaction=self, **kwargs)
+
+ async def get(
+ self,
+ ref_or_query,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> AsyncGenerator[DocumentSnapshot, Any]:
+ """
+ Retrieve a document or a query result from the database.
+
+ Args:
+ ref_or_query: The document reference or query object to return.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
+ Yields:
+ .DocumentSnapshot: The next document snapshot that fulfills the
+ query, or :data:`None` if the document does not exist.
+ """
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+ if isinstance(ref_or_query, AsyncDocumentReference):
+ return await self._client.get_all(
+ [ref_or_query], transaction=self, **kwargs
+ )
+ elif isinstance(ref_or_query, AsyncQuery):
+ return await ref_or_query.stream(transaction=self, **kwargs)
+ else:
+ raise ValueError(
+ 'Value for argument "ref_or_query" must be a AsyncDocumentReference or a AsyncQuery.'
+ )
+
+
+class _AsyncTransactional(_BaseTransactional):
+ """Provide a callable object to use as a transactional decorater.
+
+ This is surfaced via
+ :func:`~google.cloud.firestore_v1.async_transaction.transactional`.
+
+ Args:
+ to_wrap (Coroutine[[:class:`~google.cloud.firestore_v1.async_transaction.AsyncTransaction`, ...], Any]):
+ A coroutine that should be run (and retried) in a transaction.
+ """
+
+ def __init__(self, to_wrap) -> None:
+ super(_AsyncTransactional, self).__init__(to_wrap)
+
+ async def _pre_commit(
+ self, transaction: AsyncTransaction, *args, **kwargs
+ ) -> Coroutine:
+ """Begin transaction and call the wrapped coroutine.
+
+ If the coroutine raises an exception, the transaction will be rolled
+ back. If not, the transaction will be "ready" for ``Commit`` (i.e.
+ it will have staged writes).
+
+ Args:
+ transaction
+ (:class:`~google.cloud.firestore_v1.async_transaction.AsyncTransaction`):
+ A transaction to execute the coroutine within.
+ args (Tuple[Any, ...]): The extra positional arguments to pass
+ along to the wrapped coroutine.
+ kwargs (Dict[str, Any]): The extra keyword arguments to pass
+ along to the wrapped coroutine.
+
+ Returns:
+ Any: result of the wrapped coroutine.
+
+ Raises:
+ Exception: Any failure caused by ``to_wrap``.
+ """
+ # Force the ``transaction`` to be not "in progress".
+ transaction._clean_up()
+ await transaction._begin(retry_id=self.retry_id)
+
+ # Update the stored transaction IDs.
+ self.current_id = transaction._id
+ if self.retry_id is None:
+ self.retry_id = self.current_id
+ try:
+ return await self.to_wrap(transaction, *args, **kwargs)
+ except: # noqa
+ # NOTE: If ``rollback`` fails this will lose the information
+ # from the original failure.
+ await transaction._rollback()
+ raise
+
+ async def _maybe_commit(self, transaction: AsyncTransaction) -> bool:
+ """Try to commit the transaction.
+
+ If the transaction is read-write and the ``Commit`` fails with the
+ ``ABORTED`` status code, it will be retried. Any other failure will
+ not be caught.
+
+ Args:
+ transaction
+ (:class:`~google.cloud.firestore_v1.transaction.Transaction`):
+ The transaction to be ``Commit``-ed.
+
+ Returns:
+ bool: Indicating if the commit succeeded.
+ """
+ try:
+ await transaction._commit()
+ return True
+ except exceptions.GoogleAPICallError as exc:
+ if transaction._read_only:
+ raise
+
+ if isinstance(exc, exceptions.Aborted):
+ # If a read-write transaction returns ABORTED, retry.
+ return False
+ else:
+ raise
+
+ async def __call__(self, transaction, *args, **kwargs):
+ """Execute the wrapped callable within a transaction.
+
+ Args:
+ transaction
+ (:class:`~google.cloud.firestore_v1.transaction.Transaction`):
+ A transaction to execute the callable within.
+ args (Tuple[Any, ...]): The extra positional arguments to pass
+ along to the wrapped callable.
+ kwargs (Dict[str, Any]): The extra keyword arguments to pass
+ along to the wrapped callable.
+
+ Returns:
+ Any: The result of the wrapped callable.
+
+ Raises:
+ ValueError: If the transaction does not succeed in
+ ``max_attempts``.
+ """
+ self._reset()
+
+ for attempt in range(transaction._max_attempts):
+ result = await self._pre_commit(transaction, *args, **kwargs)
+ succeeded = await self._maybe_commit(transaction)
+ if succeeded:
+ return result
+
+ # Subsequent requests will use the failed transaction ID as part of
+ # the ``BeginTransactionRequest`` when restarting this transaction
+ # (via ``options.retry_transaction``). This preserves the "spot in
+ # line" of the transaction, so exponential backoff is not required
+ # in this case.
+
+ await transaction._rollback()
+ msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts)
+ raise ValueError(msg)
+
+
+def async_transactional(
+ to_wrap: Callable[[AsyncTransaction], Any]
+) -> _AsyncTransactional:
+ """Decorate a callable so that it runs in a transaction.
+
+ Args:
+ to_wrap
+ (Callable[[:class:`~google.cloud.firestore_v1.transaction.Transaction`, ...], Any]):
+ A callable that should be run (and retried) in a transaction.
+
+ Returns:
+ Callable[[:class:`~google.cloud.firestore_v1.transaction.Transaction`, ...], Any]:
+ the wrapped callable.
+ """
+ return _AsyncTransactional(to_wrap)
+
+
+# TODO(crwilcox): this was 'coroutine' from pytype merge-pyi...
+async def _commit_with_retry(
+ client: Client, write_pbs: list, transaction_id: bytes
+) -> types.CommitResponse:
+ """Call ``Commit`` on the GAPIC client with retry / sleep.
+
+ Retries the ``Commit`` RPC on Unavailable. Usually this RPC-level
+ retry is handled by the underlying GAPICd client, but in this case it
+ doesn't because ``Commit`` is not always idempotent. But here we know it
+ is "idempotent"-like because it has a transaction ID. We also need to do
+ our own retry to special-case the ``INVALID_ARGUMENT`` error.
+
+ Args:
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ A client with GAPIC client and configuration details.
+ write_pbs (List[:class:`google.cloud.proto.firestore.v1.write.Write`, ...]):
+ A ``Write`` protobuf instance to be committed.
+ transaction_id (bytes):
+ ID of an existing transaction that this commit will run in.
+
+ Returns:
+ :class:`google.cloud.firestore_v1.types.CommitResponse`:
+ The protobuf response from ``Commit``.
+
+ Raises:
+ ~google.api_core.exceptions.GoogleAPICallError: If a non-retryable
+ exception is encountered.
+ """
+ current_sleep = _INITIAL_SLEEP
+ while True:
+ try:
+ return await client._firestore_api.commit(
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": transaction_id,
+ },
+ metadata=client._rpc_metadata,
+ )
+ except exceptions.ServiceUnavailable:
+ # Retry
+ pass
+
+ current_sleep = await _sleep(current_sleep)
+
+
+async def _sleep(
+ current_sleep: float, max_sleep: float = _MAX_SLEEP, multiplier: float = _MULTIPLIER
+) -> float:
+ """Sleep and produce a new sleep time.
+
+ .. _Exponential Backoff And Jitter: https://www.awsarchitectureblog.com/\
+ 2015/03/backoff.html
+
+ Select a duration between zero and ``current_sleep``. It might seem
+ counterintuitive to have so much jitter, but
+ `Exponential Backoff And Jitter`_ argues that "full jitter" is
+ the best strategy.
+
+ Args:
+ current_sleep (float): The current "max" for sleep interval.
+ max_sleep (Optional[float]): Eventual "max" sleep time
+ multiplier (Optional[float]): Multiplier for exponential backoff.
+
+ Returns:
+ float: Newly doubled ``current_sleep`` or ``max_sleep`` (whichever
+ is smaller)
+ """
+ actual_sleep = random.uniform(0.0, current_sleep)
+ await asyncio.sleep(actual_sleep)
+ return min(multiplier * current_sleep, max_sleep)
diff --git a/google/cloud/firestore_v1beta1/batch.py b/google/cloud/firestore_v1/base_batch.py
similarity index 50%
rename from google/cloud/firestore_v1beta1/batch.py
rename to google/cloud/firestore_v1/base_batch.py
index f3e1018abc..348a6ac454 100644
--- a/google/cloud/firestore_v1beta1/batch.py
+++ b/google/cloud/firestore_v1/base_batch.py
@@ -1,4 +1,4 @@
-# Copyright 2017 Google LLC All rights reserved.
+# Copyright 2020 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,64 +15,73 @@
"""Helpers for batch requests to the Google Cloud Firestore API."""
-from google.cloud.firestore_v1beta1 import _helpers
+from google.cloud.firestore_v1 import _helpers
+# Types needed only for Type Hints
+from google.cloud.firestore_v1.document import DocumentReference
-class WriteBatch(object):
+from typing import Union
+
+
+class BaseWriteBatch(object):
"""Accumulate write operations to be sent in a batch.
This has the same set of methods for write operations that
- :class:`~google.cloud.firestore_v1beta1.document.DocumentReference`
- does, e.g.
- :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.create`.
+ :class:`~google.cloud.firestore_v1.document.DocumentReference` does,
+ e.g. :meth:`~google.cloud.firestore_v1.document.DocumentReference.create`.
Args:
- client (~.firestore_v1beta1.client.Client): The client that
- created this batch.
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ The client that created this batch.
"""
- def __init__(self, client):
+ def __init__(self, client) -> None:
self._client = client
self._write_pbs = []
self.write_results = None
self.commit_time = None
- def _add_write_pbs(self, write_pbs):
+ def _add_write_pbs(self, write_pbs: list) -> None:
"""Add `Write`` protobufs to this transaction.
This method intended to be over-ridden by subclasses.
Args:
- write_pbs (List[google.cloud.proto.firestore.v1beta1.\
+ write_pbs (List[google.cloud.proto.firestore.v1.\
write_pb2.Write]): A list of write protobufs to be added.
"""
self._write_pbs.extend(write_pbs)
- def create(self, reference, document_data):
+ def create(self, reference: DocumentReference, document_data: dict) -> None:
"""Add a "change" to this batch to create a document.
If the document given by ``reference`` already exists, then this
batch will fail when :meth:`commit`-ed.
Args:
- reference (~.firestore_v1beta1.document.DocumentReference): A
- document reference to be created in this batch.
+ reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
+ A document reference to be created in this batch.
document_data (dict): Property names and values to use for
creating a document.
"""
write_pbs = _helpers.pbs_for_create(reference._document_path, document_data)
self._add_write_pbs(write_pbs)
- def set(self, reference, document_data, merge=False):
+ def set(
+ self,
+ reference: DocumentReference,
+ document_data: dict,
+ merge: Union[bool, list] = False,
+ ) -> None:
"""Add a "change" to replace a document.
See
- :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.set`
- for more information on how ``option`` determines how the change is
+ :meth:`google.cloud.firestore_v1.document.DocumentReference.set` for
+ more information on how ``option`` determines how the change is
applied.
Args:
- reference (~.firestore_v1beta1.document.DocumentReference):
+ reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
A document reference that will have values set in this batch.
document_data (dict):
Property names and values to use for replacing a document.
@@ -91,21 +100,26 @@ def set(self, reference, document_data, merge=False):
self._add_write_pbs(write_pbs)
- def update(self, reference, field_updates, option=None):
+ def update(
+ self,
+ reference: DocumentReference,
+ field_updates: dict,
+ option: _helpers.WriteOption = None,
+ ) -> None:
"""Add a "change" to update a document.
See
- :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.update`
+ :meth:`google.cloud.firestore_v1.document.DocumentReference.update`
for more information on ``field_updates`` and ``option``.
Args:
- reference (~.firestore_v1beta1.document.DocumentReference): A
- document reference that will be deleted in this batch.
- field_updates (dict): Field names or paths to update and values
- to update with.
- option (Optional[~.firestore_v1beta1.client.WriteOption]): A
- write option to make assertions / preconditions on the server
- state of the document before applying changes.
+ reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
+ A document reference that will be updated in this batch.
+ field_updates (dict):
+ Field names or paths to update and values to update with.
+ option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
+ A write option to make assertions / preconditions on the server
+ state of the document before applying changes.
"""
if option.__class__.__name__ == "ExistsOption":
raise ValueError("you must not pass an explicit write option to " "update.")
@@ -114,49 +128,32 @@ def update(self, reference, field_updates, option=None):
)
self._add_write_pbs(write_pbs)
- def delete(self, reference, option=None):
+ def delete(
+ self, reference: DocumentReference, option: _helpers.WriteOption = None
+ ) -> None:
"""Add a "change" to delete a document.
See
- :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.delete`
+ :meth:`google.cloud.firestore_v1.document.DocumentReference.delete`
for more information on how ``option`` determines how the change is
applied.
Args:
- reference (~.firestore_v1beta1.document.DocumentReference): A
- document reference that will be deleted in this batch.
- option (Optional[~.firestore_v1beta1.client.WriteOption]): A
- write option to make assertions / preconditions on the server
- state of the document before applying changes.
+ reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
+ A document reference that will be deleted in this batch.
+ option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
+ A write option to make assertions / preconditions on the server
+ state of the document before applying changes.
"""
write_pb = _helpers.pb_for_delete(reference._document_path, option)
self._add_write_pbs([write_pb])
- def commit(self):
- """Commit the changes accumulated in this batch.
-
- Returns:
- List[google.cloud.proto.firestore.v1beta1.\
- write_pb2.WriteResult, ...]: The write results corresponding
- to the changes committed, returned in the same order as the
- changes were applied to this batch. A write result contains an
- ``update_time`` field.
- """
- commit_response = self._client._firestore_api.commit(
- self._client._database_string,
- self._write_pbs,
- transaction=None,
- metadata=self._client._rpc_metadata,
- )
-
- self._write_pbs = []
- self.write_results = results = list(commit_response.write_results)
- self.commit_time = commit_response.commit_time
- return results
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- if exc_type is None:
- self.commit()
+ def _prep_commit(self, retry, timeout):
+ """Shared setup for async/sync :meth:`commit`."""
+ request = {
+ "database": self._client._database_string,
+ "writes": self._write_pbs,
+ "transaction": None,
+ }
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+ return request, kwargs
diff --git a/google/cloud/firestore_v1beta1/client.py b/google/cloud/firestore_v1/base_client.py
similarity index 52%
rename from google/cloud/firestore_v1beta1/client.py
rename to google/cloud/firestore_v1/base_client.py
index 50036f0adb..f532ec1b74 100644
--- a/google/cloud/firestore_v1beta1/client.py
+++ b/google/cloud/firestore_v1/base_client.py
@@ -18,44 +18,60 @@
In the hierarchy of API concepts
-* a :class:`~google.cloud.firestore_v1beta1.client.Client` owns a
- :class:`~google.cloud.firestore_v1beta1.collection.CollectionReference`
-* a :class:`~google.cloud.firestore_v1beta1.client.Client` owns a
- :class:`~google.cloud.firestore_v1beta1.document.DocumentReference`
+* a :class:`~google.cloud.firestore_v1.client.Client` owns a
+ :class:`~google.cloud.firestore_v1.collection.CollectionReference`
+* a :class:`~google.cloud.firestore_v1.client.Client` owns a
+ :class:`~google.cloud.firestore_v1.document.DocumentReference`
"""
-import warnings
-from google.cloud.client import ClientWithProject
+import os
+
+import google.api_core.client_options # type: ignore
+import google.api_core.path_template # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core.gapic_v1 import client_info # type: ignore
+from google.cloud.client import ClientWithProject # type: ignore
+
+from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1 import __version__
+from google.cloud.firestore_v1 import types
+from google.cloud.firestore_v1.base_document import DocumentSnapshot
+
+from google.cloud.firestore_v1.field_path import render_field_path
+from typing import (
+ Any,
+ AsyncGenerator,
+ Generator,
+ Iterable,
+ List,
+ Optional,
+ Tuple,
+ Union,
+)
-from google.cloud.firestore_v1beta1 import _helpers
-from google.cloud.firestore_v1beta1 import types
-from google.cloud.firestore_v1beta1.batch import WriteBatch
-from google.cloud.firestore_v1beta1.collection import CollectionReference
-from google.cloud.firestore_v1beta1.document import DocumentReference
-from google.cloud.firestore_v1beta1.document import DocumentSnapshot
-from google.cloud.firestore_v1beta1.field_path import render_field_path
-from google.cloud.firestore_v1beta1.gapic import firestore_client
-from google.cloud.firestore_v1beta1.gapic.transports import firestore_grpc_transport
-from google.cloud.firestore_v1beta1.transaction import Transaction
+# Types needed only for Type Hints
+from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+from google.cloud.firestore_v1.base_document import BaseDocumentReference
+from google.cloud.firestore_v1.base_transaction import BaseTransaction
+from google.cloud.firestore_v1.base_batch import BaseWriteBatch
+from google.cloud.firestore_v1.base_query import BaseQuery
DEFAULT_DATABASE = "(default)"
-"""str: The default database used in a :class:`~google.cloud.firestore.client.Client`."""
+"""str: The default database used in a :class:`~google.cloud.firestore_v1.client.Client`."""
_BAD_OPTION_ERR = (
"Exactly one of ``last_update_time`` or ``exists`` " "must be provided."
)
-_BAD_DOC_TEMPLATE = (
+_BAD_DOC_TEMPLATE: str = (
"Document {!r} appeared in response but was not present among references"
)
-_ACTIVE_TXN = "There is already an active transaction."
-_INACTIVE_TXN = "There is no active transaction."
-_V1BETA1_DEPRECATED_MESSAGE = (
- "The 'v1beta1' API endpoint is deprecated. "
- "The client/library which supports it will be removed in a future release."
-)
+_ACTIVE_TXN: str = "There is already an active transaction."
+_INACTIVE_TXN: str = "There is no active transaction."
+_CLIENT_INFO: Any = client_info.ClientInfo(client_library_version=__version__)
+_FIRESTORE_EMULATOR_HOST: str = "FIRESTORE_EMULATOR_HOST"
-class Client(ClientWithProject):
+class BaseClient(ClientWithProject):
"""Client for interacting with Google Cloud Firestore API.
.. note::
@@ -73,6 +89,14 @@ class Client(ClientWithProject):
database (Optional[str]): The database name that the client targets.
For now, :attr:`DEFAULT_DATABASE` (the default value) is the
only valid database.
+ client_info (Optional[google.api_core.gapic_v1.client_info.ClientInfo]):
+ The client info used to send a user-agent string along with API
+ requests. If ``None``, then default info will be used. Generally,
+ you only need to set this if you're developing your own library
+ or partner tool.
+ client_options (Union[dict, google.api_core.client_options.ClientOptions]):
+ Client options used to set user options on the client. API Endpoint
+ should be set through client_options.
"""
SCOPE = (
@@ -85,51 +109,76 @@ class Client(ClientWithProject):
_database_string_internal = None
_rpc_metadata_internal = None
- def __init__(self, project=None, credentials=None, database=DEFAULT_DATABASE):
- warnings.warn(_V1BETA1_DEPRECATED_MESSAGE, DeprecationWarning, stacklevel=2)
+ def __init__(
+ self,
+ project=None,
+ credentials=None,
+ database=DEFAULT_DATABASE,
+ client_info=_CLIENT_INFO,
+ client_options=None,
+ ) -> None:
# NOTE: This API has no use for the _http argument, but sending it
# will have no impact since the _http() @property only lazily
# creates a working HTTP object.
- super(Client, self).__init__(
- project=project, credentials=credentials, _http=None
+ super(BaseClient, self).__init__(
+ project=project,
+ credentials=credentials,
+ client_options=client_options,
+ _http=None,
)
+ self._client_info = client_info
+ if client_options:
+ if type(client_options) == dict:
+ client_options = google.api_core.client_options.from_dict( # type: ignore
+ client_options
+ )
+ self._client_options = client_options
+
self._database = database
+ self._emulator_host = os.getenv(_FIRESTORE_EMULATOR_HOST)
- @property
- def _firestore_api(self):
+ def _firestore_api_helper(self, transport, client_class, client_module) -> Any:
"""Lazy-loading getter GAPIC Firestore API.
-
Returns:
- ~.gapic.firestore.v1beta1.firestore_client.FirestoreClient: The
- GAPIC client with the credentials of the current client.
+ The GAPIC client with the credentials of the current client.
"""
if self._firestore_api_internal is None:
# Use a custom channel.
# We need this in order to set appropriate keepalive options.
- channel = firestore_grpc_transport.FirestoreGrpcTransport.create_channel(
- self._target,
- credentials=self._credentials,
- options={"grpc.keepalive_time_ms": 30000}.items(),
- )
-
- self._transport = firestore_grpc_transport.FirestoreGrpcTransport(
- address=self._target, channel=channel
- )
- self._firestore_api_internal = firestore_client.FirestoreClient(
- transport=self._transport
+ if self._emulator_host is not None:
+ # TODO(microgen): this likely needs to be adapted to use insecure_channel
+ # on new generated surface.
+ channel = transport.create_channel(host=self._emulator_host)
+ else:
+ channel = transport.create_channel(
+ self._target,
+ credentials=self._credentials,
+ options={"grpc.keepalive_time_ms": 30000}.items(),
+ )
+
+ self._transport = transport(host=self._target, channel=channel)
+
+ self._firestore_api_internal = client_class(
+ transport=self._transport, client_options=self._client_options
)
+ client_module._client_info = self._client_info
return self._firestore_api_internal
- @property
- def _target(self):
+ def _target_helper(self, client_class) -> str:
"""Return the target (where the API is).
+ E.g. "firestore.googleapis.com"
Returns:
str: The location of the API.
"""
- return firestore_client.FirestoreClient.SERVICE_ADDRESS
+ if self._emulator_host is not None:
+ return self._emulator_host
+ elif self._client_options and self._client_options.api_endpoint:
+ return self._client_options.api_endpoint
+ else:
+ return client_class.DEFAULT_ENDPOINT
@property
def _database_string(self):
@@ -148,11 +197,12 @@ def _database_string(self):
project. (The default database is also in this string.)
"""
if self._database_string_internal is None:
- # NOTE: database_root_path() is a classmethod, so we don't use
- # self._firestore_api (it isn't necessary).
- db_str = firestore_client.FirestoreClient.database_root_path(
- self.project, self._database
+ db_str = google.api_core.path_template.expand( # type: ignore
+ "projects/{project}/databases/{database}",
+ project=self.project,
+ database=self._database,
)
+
self._database_string_internal = db_str
return self._database_string_internal
@@ -170,84 +220,60 @@ def _rpc_metadata(self):
self._database_string
)
- return self._rpc_metadata_internal
-
- def collection(self, *collection_path):
- """Get a reference to a collection.
-
- For a top-level collection:
-
- .. code-block:: python
-
- >>> client.collection('top')
+ if self._emulator_host is not None:
+ # The emulator requires additional metadata to be set.
+ self._rpc_metadata_internal.append(("authorization", "Bearer owner"))
- For a sub-collection:
+ return self._rpc_metadata_internal
- .. code-block:: python
+ def collection(self, *collection_path) -> BaseCollectionReference:
+ raise NotImplementedError
- >>> client.collection('mydocs/doc/subcol')
- >>> # is the same as
- >>> client.collection('mydocs', 'doc', 'subcol')
+ def collection_group(self, collection_id: str) -> BaseQuery:
+ raise NotImplementedError
- Sub-collections can be nested deeper in a similar fashion.
+ def _get_collection_reference(self, collection_id: str) -> BaseCollectionReference:
+ """Checks validity of collection_id and then uses the subclass's collection implementation.
Args:
- collection_path (Tuple[str, ...]): Can either be
+ collection_id (str): Identifies the collections to query over.
- * A single ``/``-delimited path to a collection
- * A tuple of collection path segments
+ Every collection or subcollection with this ID as the last segment of its
+ path will be included. Cannot contain a slash.
Returns:
- ~.firestore_v1beta1.collection.CollectionReference: A reference
- to a collection in the Firestore database.
+ The created collection.
"""
- if len(collection_path) == 1:
- path = collection_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
- else:
- path = collection_path
-
- return CollectionReference(*path, client=self)
-
- def document(self, *document_path):
- """Get a reference to a document in a collection.
-
- For a top-level document:
-
- .. code-block:: python
-
- >>> client.document('collek/shun')
- >>> # is the same as
- >>> client.document('collek', 'shun')
-
- For a document in a sub-collection:
+ if "/" in collection_id:
+ raise ValueError(
+ "Invalid collection_id "
+ + collection_id
+ + ". Collection IDs must not contain '/'."
+ )
- .. code-block:: python
+ return self.collection(collection_id)
- >>> client.document('mydocs/doc/subcol/child')
- >>> # is the same as
- >>> client.document('mydocs', 'doc', 'subcol', 'child')
+ def document(self, *document_path) -> BaseDocumentReference:
+ raise NotImplementedError
- Documents in sub-collections can be nested deeper in a similar fashion.
+ def _document_path_helper(self, *document_path) -> List[str]:
+ """Standardize the format of path to tuple of path segments and strip the database string from path if present.
Args:
document_path (Tuple[str, ...]): Can either be
* A single ``/``-delimited path to a document
* A tuple of document path segments
-
- Returns:
- ~.firestore_v1beta1.document.DocumentReference: A reference
- to a document in a collection.
"""
- if len(document_path) == 1:
- path = document_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
- else:
- path = document_path
-
- return DocumentReference(*path, client=self)
+ path = _path_helper(document_path)
+ base_path = self._database_string + "/documents/"
+ joined_path = _helpers.DOCUMENT_PATH_DELIMITER.join(path)
+ if joined_path.startswith(base_path):
+ joined_path = joined_path[len(base_path) :]
+ return joined_path.split(_helpers.DOCUMENT_PATH_DELIMITER)
@staticmethod
- def field_path(*field_names):
+ def field_path(*field_names: Tuple[str]) -> str:
"""Create a **field path** from a list of nested field names.
A **field path** is a ``.``-delimited concatenation of the field
@@ -276,7 +302,11 @@ def field_path(*field_names):
return render_field_path(field_names)
@staticmethod
- def write_option(**kwargs):
+ def write_option(
+ **kwargs,
+ ) -> Union[
+ _helpers.ExistsOption, _helpers.LastUpdateOption,
+ ]:
"""Create a write option for write operations.
Write operations include :meth:`~google.cloud.DocumentReference.set`,
@@ -307,6 +337,10 @@ def write_option(**kwargs):
Raises:
TypeError: If anything other than exactly one argument is
provided by the caller.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.client.WriteOption`:
+ The option to be used to configure a write message.
"""
if len(kwargs) != 1:
raise TypeError(_BAD_OPTION_ERR)
@@ -320,101 +354,67 @@ def write_option(**kwargs):
extra = "{!r} was provided".format(name)
raise TypeError(_BAD_OPTION_ERR, extra)
- def get_all(self, references, field_paths=None, transaction=None):
- """Retrieve a batch of documents.
-
- .. note::
-
- Documents returned by this method are not guaranteed to be
- returned in the same order that they are given in ``references``.
-
- .. note::
-
- If multiple ``references`` refer to the same document, the server
- will only return one result.
-
- See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
- for more information on **field paths**.
-
- If a ``transaction`` is used and it already has write operations
- added, this method cannot be used (i.e. read-after-write is not
- allowed).
-
- Args:
- references (List[.DocumentReference, ...]): Iterable of document
- references to be retrieved.
- field_paths (Optional[Iterable[str, ...]]): An iterable of field
- paths (``.``-delimited list of field names) to use as a
- projection of document fields in the returned results. If
- no value is provided, all fields will be returned.
- transaction (Optional[~.firestore_v1beta1.transaction.\
- Transaction]): An existing transaction that these
- ``references`` will be retrieved in.
-
- Yields:
- .DocumentSnapshot: The next document snapshot that fulfills the
- query, or :data:`None` if the document does not exist.
- """
+ def _prep_get_all(
+ self,
+ references: list,
+ field_paths: Iterable[str] = None,
+ transaction: BaseTransaction = None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> Tuple[dict, dict, dict]:
+ """Shared setup for async/sync :meth:`get_all`."""
document_paths, reference_map = _reference_info(references)
mask = _get_doc_mask(field_paths)
- response_iterator = self._firestore_api.batch_get_documents(
- self._database_string,
- document_paths,
- mask,
- transaction=_helpers.get_transaction_id(transaction),
- metadata=self._rpc_metadata,
- )
-
- for get_doc_response in response_iterator:
- yield _parse_batch_get(get_doc_response, reference_map, self)
-
- def collections(self):
- """List top-level collections of the client's database.
-
- Returns:
- Sequence[~.firestore_v1beta1.collection.CollectionReference]:
- iterator of subcollections of the current document.
- """
- iterator = self._firestore_api.list_collection_ids(
- self._database_string, metadata=self._rpc_metadata
- )
- iterator.client = self
- iterator.item_to_value = _item_to_collection_ref
- return iterator
-
- def batch(self):
- """Get a batch instance from this client.
-
- Returns:
- ~.firestore_v1beta1.batch.WriteBatch: A "write" batch to be
- used for accumulating document changes and sending the changes
- all at once.
- """
- return WriteBatch(self)
-
- def transaction(self, **kwargs):
- """Get a transaction that uses this client.
-
- See :class:`~google.cloud.firestore_v1beta1.transaction.Transaction`
- for more information on transactions and the constructor arguments.
-
- Args:
- kwargs (Dict[str, Any]): The keyword arguments (other than
- ``client``) to pass along to the
- :class:`~google.cloud.firestore_v1beta1.transaction.Transaction`
- constructor.
-
- Returns:
- ~.firestore_v1beta1.transaction.Transaction: A transaction
- attached to this client.
- """
- return Transaction(self, **kwargs)
-
-
-def _reference_info(references):
+ request = {
+ "database": self._database_string,
+ "documents": document_paths,
+ "mask": mask,
+ "transaction": _helpers.get_transaction_id(transaction),
+ }
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return request, reference_map, kwargs
+
+ def get_all(
+ self,
+ references: list,
+ field_paths: Iterable[str] = None,
+ transaction: BaseTransaction = None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> Union[
+ AsyncGenerator[DocumentSnapshot, Any], Generator[DocumentSnapshot, Any, Any]
+ ]:
+ raise NotImplementedError
+
+ def _prep_collections(
+ self, retry: retries.Retry = None, timeout: float = None,
+ ) -> Tuple[dict, dict]:
+ """Shared setup for async/sync :meth:`collections`."""
+ request = {"parent": "{}/documents".format(self._database_string)}
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return request, kwargs
+
+ def collections(
+ self, retry: retries.Retry = None, timeout: float = None,
+ ) -> Union[
+ AsyncGenerator[BaseCollectionReference, Any],
+ Generator[BaseCollectionReference, Any, Any],
+ ]:
+ raise NotImplementedError
+
+ def batch(self) -> BaseWriteBatch:
+ raise NotImplementedError
+
+ def transaction(self, **kwargs) -> BaseTransaction:
+ raise NotImplementedError
+
+
+def _reference_info(references: list) -> Tuple[list, dict]:
"""Get information about document references.
- Helper for :meth:`~google.cloud.firestore_v1beta1.client.Client.get_all`.
+ Helper for :meth:`~google.cloud.firestore_v1.client.Client.get_all`.
Args:
references (List[.DocumentReference, ...]): Iterable of document
@@ -438,7 +438,7 @@ def _reference_info(references):
return document_paths, reference_map
-def _get_reference(document_path, reference_map):
+def _get_reference(document_path: str, reference_map: dict) -> BaseDocumentReference:
"""Get a document reference from a dictionary.
This just wraps a simple dictionary look-up with a helpful error that is
@@ -464,18 +464,22 @@ def _get_reference(document_path, reference_map):
raise ValueError(msg)
-def _parse_batch_get(get_doc_response, reference_map, client):
+def _parse_batch_get(
+ get_doc_response: types.BatchGetDocumentsResponse,
+ reference_map: dict,
+ client: BaseClient,
+) -> DocumentSnapshot:
"""Parse a `BatchGetDocumentsResponse` protobuf.
Args:
- get_doc_response (~google.cloud.proto.firestore.v1beta1.\
- firestore_pb2.BatchGetDocumentsResponse): A single response (from
+ get_doc_response (~google.cloud.proto.firestore.v1.\
+ firestore.BatchGetDocumentsResponse): A single response (from
a stream) containing the "get" response for a document.
reference_map (Dict[str, .DocumentReference]): A mapping (produced
by :func:`_reference_info`) of fully-qualified document paths to
document references.
- client (~.firestore_v1beta1.client.Client): A client that has
- a document factory.
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ A client that has a document factory.
Returns:
[.DocumentSnapshot]: The retrieved snapshot.
@@ -484,7 +488,7 @@ def _parse_batch_get(get_doc_response, reference_map, client):
ValueError: If the response has a ``result`` field (a oneof) other
than ``found`` or ``missing``.
"""
- result_type = get_doc_response.WhichOneof("result")
+ result_type = get_doc_response._pb.WhichOneof("result")
if result_type == "found":
reference = _get_reference(get_doc_response.found.name, reference_map)
data = _helpers.decode_dict(get_doc_response.found.fields, client)
@@ -497,8 +501,9 @@ def _parse_batch_get(get_doc_response, reference_map, client):
update_time=get_doc_response.found.update_time,
)
elif result_type == "missing":
+ reference = _get_reference(get_doc_response.missing, reference_map)
snapshot = DocumentSnapshot(
- None,
+ reference,
None,
exists=False,
read_time=get_doc_response.read_time,
@@ -513,7 +518,7 @@ def _parse_batch_get(get_doc_response, reference_map, client):
return snapshot
-def _get_doc_mask(field_paths):
+def _get_doc_mask(field_paths: Iterable[str]) -> Optional[types.common.DocumentMask]:
"""Get a document mask if field paths are provided.
Args:
@@ -522,7 +527,7 @@ def _get_doc_mask(field_paths):
projection of document fields in the returned results.
Returns:
- Optional[google.cloud.firestore_v1beta1.types.DocumentMask]: A mask
+ Optional[google.cloud.firestore_v1.types.common.DocumentMask]: A mask
to project documents to a restricted set of field paths.
"""
if field_paths is None:
@@ -531,12 +536,16 @@ def _get_doc_mask(field_paths):
return types.DocumentMask(field_paths=field_paths)
-def _item_to_collection_ref(iterator, item):
- """Convert collection ID to collection ref.
+def _path_helper(path: tuple) -> Tuple[str]:
+ """Standardize path into a tuple of path segments.
Args:
- iterator (google.api_core.page_iterator.GRPCIterator):
- iterator response
- item (str): ID of the collection
+ path (Tuple[str, ...]): Can either be
+
+ * A single ``/``-delimited path
+ * A tuple of path segments
"""
- return iterator.client.collection(item)
+ if len(path) == 1:
+ return path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
+ else:
+ return path
diff --git a/google/cloud/firestore_v1/base_collection.py b/google/cloud/firestore_v1/base_collection.py
new file mode 100644
index 0000000000..956c4b4b15
--- /dev/null
+++ b/google/cloud/firestore_v1/base_collection.py
@@ -0,0 +1,463 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing collections for the Google Cloud Firestore API."""
+import random
+
+from google.api_core import retry as retries # type: ignore
+
+from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1.document import DocumentReference
+from typing import (
+ Any,
+ AsyncGenerator,
+ Coroutine,
+ Generator,
+ AsyncIterator,
+ Iterator,
+ Iterable,
+ NoReturn,
+ Tuple,
+ Union,
+)
+
+# Types needed only for Type Hints
+from google.cloud.firestore_v1.base_document import DocumentSnapshot
+from google.cloud.firestore_v1.base_query import BaseQuery
+from google.cloud.firestore_v1.transaction import Transaction
+
+_AUTO_ID_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
+
+
+class BaseCollectionReference(object):
+ """A reference to a collection in a Firestore database.
+
+ The collection may already exist or this class can facilitate creation
+ of documents within the collection.
+
+ Args:
+ path (Tuple[str, ...]): The components in the collection path.
+ This is a series of strings representing each collection and
+ sub-collection ID, as well as the document IDs for any documents
+ that contain a sub-collection.
+ kwargs (dict): The keyword arguments for the constructor. The only
+ supported keyword is ``client`` and it must be a
+ :class:`~google.cloud.firestore_v1.client.Client` if provided. It
+ represents the client that created this collection reference.
+
+ Raises:
+ ValueError: if
+
+ * the ``path`` is empty
+ * there are an even number of elements
+ * a collection ID in ``path`` is not a string
+ * a document ID in ``path`` is not a string
+ TypeError: If a keyword other than ``client`` is used.
+ """
+
+ def __init__(self, *path, **kwargs) -> None:
+ _helpers.verify_path(path, is_collection=True)
+ self._path = path
+ self._client = kwargs.pop("client", None)
+ if kwargs:
+ raise TypeError(
+ "Received unexpected arguments", kwargs, "Only `client` is supported"
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._path == other._path and self._client == other._client
+
+ @property
+ def id(self):
+ """The collection identifier.
+
+ Returns:
+ str: The last component of the path.
+ """
+ return self._path[-1]
+
+ @property
+ def parent(self):
+ """Document that owns the current collection.
+
+ Returns:
+ Optional[:class:`~google.cloud.firestore_v1.document.DocumentReference`]:
+ The parent document, if the current collection is not a
+ top-level collection.
+ """
+ if len(self._path) == 1:
+ return None
+ else:
+ parent_path = self._path[:-1]
+ return self._client.document(*parent_path)
+
+ def _query(self) -> BaseQuery:
+ raise NotImplementedError
+
+ def document(self, document_id: str = None) -> DocumentReference:
+ """Create a sub-document underneath the current collection.
+
+ Args:
+ document_id (Optional[str]): The document identifier
+ within the current collection. If not provided, will default
+ to a random 20 character string composed of digits,
+                uppercase and lowercase letters.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.document.DocumentReference`:
+ The child document.
+ """
+ if document_id is None:
+ document_id = _auto_id()
+
+ child_path = self._path + (document_id,)
+ return self._client.document(*child_path)
+
+ def _parent_info(self) -> Tuple[Any, str]:
+ """Get fully-qualified parent path and prefix for this collection.
+
+ Returns:
+ Tuple[str, str]: Pair of
+
+ * the fully-qualified (with database and project) path to the
+ parent of this collection (will either be the database path
+ or a document path).
+ * the prefix to a document in this collection.
+ """
+ parent_doc = self.parent
+ if parent_doc is None:
+ parent_path = _helpers.DOCUMENT_PATH_DELIMITER.join(
+ (self._client._database_string, "documents")
+ )
+ else:
+ parent_path = parent_doc._document_path
+
+ expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join((parent_path, self.id))
+ return parent_path, expected_prefix
+
+ def _prep_add(
+ self,
+ document_data: dict,
+ document_id: str = None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> Tuple[DocumentReference, dict]:
+ """Shared setup for async / sync :method:`add`"""
+ if document_id is None:
+ document_id = _auto_id()
+
+ document_ref = self.document(document_id)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return document_ref, kwargs
+
+ def add(
+ self,
+ document_data: dict,
+ document_id: str = None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> Union[Tuple[Any, Any], Coroutine[Any, Any, Tuple[Any, Any]]]:
+ raise NotImplementedError
+
+ def _prep_list_documents(
+ self, page_size: int = None, retry: retries.Retry = None, timeout: float = None,
+ ) -> Tuple[dict, dict]:
+ """Shared setup for async / sync :method:`list_documents`"""
+ parent, _ = self._parent_info()
+ request = {
+ "parent": parent,
+ "collection_id": self.id,
+ "page_size": page_size,
+ "show_missing": True,
+ }
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return request, kwargs
+
+ def list_documents(
+ self, page_size: int = None, retry: retries.Retry = None, timeout: float = None,
+ ) -> Union[
+ Generator[DocumentReference, Any, Any], AsyncGenerator[DocumentReference, Any]
+ ]:
+ raise NotImplementedError
+
+ def select(self, field_paths: Iterable[str]) -> BaseQuery:
+ """Create a "select" query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.select` for
+ more information on this method.
+
+ Args:
+ field_paths (Iterable[str, ...]): An iterable of field paths
+ (``.``-delimited list of field names) to use as a projection
+ of document fields in the query results.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A "projected" query.
+ """
+ query = self._query()
+ return query.select(field_paths)
+
+ def where(self, field_path: str, op_string: str, value) -> BaseQuery:
+ """Create a "where" query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.where` for
+ more information on this method.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) for the field to filter on.
+ op_string (str): A comparison operation in the form of a string.
+ Acceptable values are ``<``, ``<=``, ``==``, ``>=``
+ and ``>``.
+ value (Any): The value to compare the field against in the filter.
+ If ``value`` is :data:`None` or a NaN, then ``==`` is the only
+ allowed operation.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A filtered query.
+ """
+ query = self._query()
+ return query.where(field_path, op_string, value)
+
+ def order_by(self, field_path: str, **kwargs) -> BaseQuery:
+ """Create an "order by" query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by` for
+ more information on this method.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) on which to order the query results.
+ kwargs (Dict[str, Any]): The keyword arguments to pass along
+ to the query. The only supported keyword is ``direction``,
+ see :meth:`~google.cloud.firestore_v1.query.Query.order_by`
+ for more information.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ An "order by" query.
+ """
+ query = self._query()
+ return query.order_by(field_path, **kwargs)
+
+ def limit(self, count: int) -> BaseQuery:
+ """Create a limited query with this collection as parent.
+
+ .. note::
+ `limit` and `limit_to_last` are mutually exclusive.
+ Setting `limit` will drop previously set `limit_to_last`.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.limit` for
+ more information on this method.
+
+ Args:
+ count (int): Maximum number of documents to return that match
+ the query.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A limited query.
+ """
+ query = self._query()
+ return query.limit(count)
+
+ def limit_to_last(self, count: int):
+ """Create a limited to last query with this collection as parent.
+ .. note::
+ `limit` and `limit_to_last` are mutually exclusive.
+ Setting `limit_to_last` will drop previously set `limit`.
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.limit_to_last`
+ for more information on this method.
+ Args:
+ count (int): Maximum number of documents to return that
+ match the query.
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A limited to last query.
+ """
+ query = self._query()
+ return query.limit_to_last(count)
+
+ def offset(self, num_to_skip: int) -> BaseQuery:
+ """Skip to an offset in a query with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.offset` for
+ more information on this method.
+
+ Args:
+ num_to_skip (int): The number of results to skip at the beginning
+ of query results. (Must be non-negative.)
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ An offset query.
+ """
+ query = self._query()
+ return query.offset(num_to_skip)
+
+ def start_at(
+ self, document_fields: Union[DocumentSnapshot, dict, list, tuple]
+ ) -> BaseQuery:
+ """Start query at a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.start_at` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[:class:`~google.cloud.firestore_v1.\
+ document.DocumentSnapshot`, dict, list, tuple]):
+ A document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor.
+ """
+ query = self._query()
+ return query.start_at(document_fields)
+
+ def start_after(
+ self, document_fields: Union[DocumentSnapshot, dict, list, tuple]
+ ) -> BaseQuery:
+ """Start query after a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.start_after` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[:class:`~google.cloud.firestore_v1.\
+ document.DocumentSnapshot`, dict, list, tuple]):
+ A document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor.
+ """
+ query = self._query()
+ return query.start_after(document_fields)
+
+ def end_before(
+ self, document_fields: Union[DocumentSnapshot, dict, list, tuple]
+ ) -> BaseQuery:
+ """End query before a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.end_before` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[:class:`~google.cloud.firestore_v1.\
+ document.DocumentSnapshot`, dict, list, tuple]):
+ A document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor.
+ """
+ query = self._query()
+ return query.end_before(document_fields)
+
+ def end_at(
+ self, document_fields: Union[DocumentSnapshot, dict, list, tuple]
+ ) -> BaseQuery:
+ """End query at a cursor with this collection as parent.
+
+ See
+ :meth:`~google.cloud.firestore_v1.query.Query.end_at` for
+ more information on this method.
+
+ Args:
+ document_fields (Union[:class:`~google.cloud.firestore_v1.\
+ document.DocumentSnapshot`, dict, list, tuple]):
+ A document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor.
+ """
+ query = self._query()
+ return query.end_at(document_fields)
+
+ def _prep_get_or_stream(
+ self, retry: retries.Retry = None, timeout: float = None,
+ ) -> Tuple[Any, dict]:
+ """Shared setup for async / sync :meth:`get` / :meth:`stream`"""
+ query = self._query()
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return query, kwargs
+
+ def get(
+ self,
+ transaction: Transaction = None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> Union[
+ Generator[DocumentSnapshot, Any, Any], AsyncGenerator[DocumentSnapshot, Any]
+ ]:
+ raise NotImplementedError
+
+ def stream(
+ self,
+ transaction: Transaction = None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> Union[Iterator[DocumentSnapshot], AsyncIterator[DocumentSnapshot]]:
+ raise NotImplementedError
+
+ def on_snapshot(self, callback) -> NoReturn:
+ raise NotImplementedError
+
+
+def _auto_id() -> str:
+ """Generate a "random" automatically generated ID.
+
+ Returns:
+ str: A 20 character string composed of digits, uppercase and
+            lowercase letters.
+ """
+ return "".join(random.choice(_AUTO_ID_CHARS) for _ in range(20))
+
+
+def _item_to_document_ref(collection_reference, item) -> DocumentReference:
+ """Convert Document resource to document ref.
+
+ Args:
+        collection_reference (BaseCollectionReference):
+            The parent collection that will own the returned document reference.
+ item (dict): document resource
+ """
+ document_id = item.name.split(_helpers.DOCUMENT_PATH_DELIMITER)[-1]
+ return collection_reference.document(document_id)
diff --git a/google/cloud/firestore_v1/base_document.py b/google/cloud/firestore_v1/base_document.py
new file mode 100644
index 0000000000..441a30b51a
--- /dev/null
+++ b/google/cloud/firestore_v1/base_document.py
@@ -0,0 +1,569 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing documents for the Google Cloud Firestore API."""
+
+import copy
+
+from google.api_core import retry as retries # type: ignore
+
+from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1 import field_path as field_path_module
+from google.cloud.firestore_v1.types import common
+
+# Types needed only for Type Hints
+from google.cloud.firestore_v1.types import firestore
+from google.cloud.firestore_v1.types import write
+from typing import Any, Dict, Iterable, NoReturn, Union, Tuple
+
+
+class BaseDocumentReference(object):
+ """A reference to a document in a Firestore database.
+
+ The document may already exist or can be created by this class.
+
+ Args:
+ path (Tuple[str, ...]): The components in the document path.
+ This is a series of strings representing each collection and
+ sub-collection ID, as well as the document IDs for any documents
+ that contain a sub-collection (as well as the base document).
+ kwargs (dict): The keyword arguments for the constructor. The only
+ supported keyword is ``client`` and it must be a
+ :class:`~google.cloud.firestore_v1.client.Client`. It represents
+ the client that created this document reference.
+
+ Raises:
+ ValueError: if
+
+ * the ``path`` is empty
+ * there are an even number of elements
+ * a collection ID in ``path`` is not a string
+ * a document ID in ``path`` is not a string
+ TypeError: If a keyword other than ``client`` is used.
+ """
+
+ _document_path_internal = None
+
+ def __init__(self, *path, **kwargs) -> None:
+ _helpers.verify_path(path, is_collection=False)
+ self._path = path
+ self._client = kwargs.pop("client", None)
+ if kwargs:
+ raise TypeError(
+ "Received unexpected arguments", kwargs, "Only `client` is supported"
+ )
+
+ def __copy__(self):
+ """Shallow copy the instance.
+
+ We leave the client "as-is" but tuple-unpack the path.
+
+ Returns:
+ .DocumentReference: A copy of the current document.
+ """
+ result = self.__class__(*self._path, client=self._client)
+ result._document_path_internal = self._document_path_internal
+ return result
+
+ def __deepcopy__(self, unused_memo):
+ """Deep copy the instance.
+
+        This isn't a true deep copy; we leave the client "as-is" but
+ tuple-unpack the path.
+
+ Returns:
+ .DocumentReference: A copy of the current document.
+ """
+ return self.__copy__()
+
+ def __eq__(self, other):
+ """Equality check against another instance.
+
+ Args:
+ other (Any): A value to compare against.
+
+ Returns:
+ Union[bool, NotImplementedType]: Indicating if the values are
+ equal.
+ """
+ if isinstance(other, self.__class__):
+ return self._client == other._client and self._path == other._path
+ else:
+ return NotImplemented
+
+ def __hash__(self):
+ return hash(self._path) + hash(self._client)
+
+ def __ne__(self, other):
+ """Inequality check against another instance.
+
+ Args:
+ other (Any): A value to compare against.
+
+ Returns:
+ Union[bool, NotImplementedType]: Indicating if the values are
+ not equal.
+ """
+ if isinstance(other, self.__class__):
+ return self._client != other._client or self._path != other._path
+ else:
+ return NotImplemented
+
+ @property
+ def path(self):
+        """Database-relative path for this document.
+
+ Returns:
+ str: The document's relative path.
+ """
+ return "/".join(self._path)
+
+ @property
+ def _document_path(self):
+ """Create and cache the full path for this document.
+
+ Of the form:
+
+ ``projects/{project_id}/databases/{database_id}/...
+ documents/{document_path}``
+
+ Returns:
+ str: The full document path.
+
+ Raises:
+ ValueError: If the current document reference has no ``client``.
+ """
+ if self._document_path_internal is None:
+ if self._client is None:
+ raise ValueError("A document reference requires a `client`.")
+ self._document_path_internal = _get_document_path(self._client, self._path)
+
+ return self._document_path_internal
+
+ @property
+ def id(self):
+ """The document identifier (within its collection).
+
+ Returns:
+ str: The last component of the path.
+ """
+ return self._path[-1]
+
+ @property
+ def parent(self):
+ """Collection that owns the current document.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.collection.CollectionReference`:
+ The parent collection.
+ """
+ parent_path = self._path[:-1]
+ return self._client.collection(*parent_path)
+
+ def collection(self, collection_id: str) -> Any:
+ """Create a sub-collection underneath the current document.
+
+ Args:
+ collection_id (str): The sub-collection identifier (sometimes
+ referred to as the "kind").
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.collection.CollectionReference`:
+ The child collection.
+ """
+ child_path = self._path + (collection_id,)
+ return self._client.collection(*child_path)
+
+ def _prep_create(
+ self, document_data: dict, retry: retries.Retry = None, timeout: float = None,
+ ) -> Tuple[Any, dict]:
+ batch = self._client.batch()
+ batch.create(self, document_data)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return batch, kwargs
+
+ def create(
+ self, document_data: dict, retry: retries.Retry = None, timeout: float = None,
+ ) -> NoReturn:
+ raise NotImplementedError
+
+ def _prep_set(
+ self,
+ document_data: dict,
+ merge: bool = False,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> Tuple[Any, dict]:
+ batch = self._client.batch()
+ batch.set(self, document_data, merge=merge)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return batch, kwargs
+
+ def set(
+ self,
+ document_data: dict,
+ merge: bool = False,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> NoReturn:
+ raise NotImplementedError
+
+ def _prep_update(
+ self,
+ field_updates: dict,
+ option: _helpers.WriteOption = None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> Tuple[Any, dict]:
+ batch = self._client.batch()
+ batch.update(self, field_updates, option=option)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return batch, kwargs
+
+ def update(
+ self,
+ field_updates: dict,
+ option: _helpers.WriteOption = None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> NoReturn:
+ raise NotImplementedError
+
+ def _prep_delete(
+ self,
+ option: _helpers.WriteOption = None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> Tuple[dict, dict]:
+ """Shared setup for async/sync :meth:`delete`."""
+ write_pb = _helpers.pb_for_delete(self._document_path, option)
+ request = {
+ "database": self._client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ }
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return request, kwargs
+
+ def delete(
+ self,
+ option: _helpers.WriteOption = None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> NoReturn:
+ raise NotImplementedError
+
+ def _prep_get(
+ self,
+ field_paths: Iterable[str] = None,
+ transaction=None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> Tuple[dict, dict]:
+ """Shared setup for async/sync :meth:`get`."""
+ if isinstance(field_paths, str):
+ raise ValueError("'field_paths' must be a sequence of paths, not a string.")
+
+ if field_paths is not None:
+ mask = common.DocumentMask(field_paths=sorted(field_paths))
+ else:
+ mask = None
+
+ request = {
+ "name": self._document_path,
+ "mask": mask,
+ "transaction": _helpers.get_transaction_id(transaction),
+ }
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return request, kwargs
+
+ def get(
+ self,
+ field_paths: Iterable[str] = None,
+ transaction=None,
+ retry: retries.Retry = None,
+ timeout: float = None,
+ ) -> "DocumentSnapshot":
+ raise NotImplementedError
+
+ def _prep_collections(
+ self, page_size: int = None, retry: retries.Retry = None, timeout: float = None,
+ ) -> Tuple[dict, dict]:
+ """Shared setup for async/sync :meth:`collections`."""
+ request = {"parent": self._document_path, "page_size": page_size}
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return request, kwargs
+
+ def collections(
+ self, page_size: int = None, retry: retries.Retry = None, timeout: float = None,
+ ) -> NoReturn:
+ raise NotImplementedError
+
+ def on_snapshot(self, callback) -> NoReturn:
+ raise NotImplementedError
+
+
+class DocumentSnapshot(object):
+ """A snapshot of document data in a Firestore database.
+
+ This represents data retrieved at a specific time and may not contain
+ all fields stored for the document (i.e. a hand-picked selection of
+ fields may have been retrieved).
+
+ Instances of this class are not intended to be constructed by hand,
+ rather they'll be returned as responses to various methods, such as
+    :meth:`~google.cloud.firestore_v1.base_document.BaseDocumentReference.get`.
+
+ Args:
+ reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
+ A document reference corresponding to the document that contains
+ the data in this snapshot.
+ data (Dict[str, Any]):
+ The data retrieved in the snapshot.
+ exists (bool):
+ Indicates if the document existed at the time the snapshot was
+ retrieved.
+ read_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
+ The time that this snapshot was read from the server.
+ create_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
+ The time that this document was created.
+ update_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
+ The time that this document was last updated.
+ """
+
+ def __init__(
+ self, reference, data, exists, read_time, create_time, update_time
+ ) -> None:
+ self._reference = reference
+ # We want immutable data, so callers can't modify this value
+ # out from under us.
+ self._data = copy.deepcopy(data)
+ self._exists = exists
+ self.read_time = read_time
+ self.create_time = create_time
+ self.update_time = update_time
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self._reference == other._reference and self._data == other._data
+
+ def __hash__(self):
+ seconds = int(self.update_time.timestamp())
+ nanos = self.update_time.nanosecond
+ return hash(self._reference) + hash(seconds) + hash(nanos)
+
+ @property
+ def _client(self):
+ """The client that owns the document reference for this snapshot.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.client.Client`:
+ The client that owns this document.
+ """
+ return self._reference._client
+
+ @property
+ def exists(self):
+ """Existence flag.
+
+ Indicates if the document existed at the time this snapshot
+ was retrieved.
+
+ Returns:
+ bool: The existence flag.
+ """
+ return self._exists
+
+ @property
+ def id(self):
+ """The document identifier (within its collection).
+
+ Returns:
+ str: The last component of the path of the document.
+ """
+ return self._reference.id
+
+ @property
+ def reference(self):
+ """Document reference corresponding to document that owns this data.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.document.DocumentReference`:
+ A document reference corresponding to this document.
+ """
+ return self._reference
+
+ def get(self, field_path: str) -> Any:
+ """Get a value from the snapshot data.
+
+ If the data is nested, for example:
+
+ .. code-block:: python
+
+ >>> snapshot.to_dict()
+ {
+ 'top1': {
+ 'middle2': {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ },
+ 'middle5': True,
+ },
+ 'top6': b'\x00\x01 foo',
+ }
+
+ a **field path** can be used to access the nested data. For
+ example:
+
+ .. code-block:: python
+
+ >>> snapshot.get('top1')
+ {
+ 'middle2': {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ },
+ 'middle5': True,
+ }
+ >>> snapshot.get('top1.middle2')
+ {
+ 'bottom3': 20,
+ 'bottom4': 22,
+ }
+ >>> snapshot.get('top1.middle2.bottom3')
+ 20
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ A copy is returned since the data may contain mutable values,
+ but the data stored in the snapshot must remain immutable.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names).
+
+ Returns:
+ Any or None:
+ (A copy of) the value stored for the ``field_path`` or
+ None if snapshot document does not exist.
+
+ Raises:
+ KeyError: If the ``field_path`` does not match nested data
+ in the snapshot.
+ """
+ if not self._exists:
+ return None
+ nested_data = field_path_module.get_nested_value(field_path, self._data)
+ return copy.deepcopy(nested_data)
+
+ def to_dict(self) -> Union[Dict[str, Any], None]:
+ """Retrieve the data contained in this snapshot.
+
+ A copy is returned since the data may contain mutable values,
+ but the data stored in the snapshot must remain immutable.
+
+ Returns:
+ Dict[str, Any] or None:
+ The data in the snapshot. Returns None if reference
+ does not exist.
+ """
+ if not self._exists:
+ return None
+ return copy.deepcopy(self._data)
+
+
+def _get_document_path(client, path: Tuple[str]) -> str:
+ """Convert a path tuple into a full path string.
+
+ Of the form:
+
+ ``projects/{project_id}/databases/{database_id}/...
+ documents/{document_path}``
+
+ Args:
+ client (:class:`~google.cloud.firestore_v1.client.Client`):
+ The client that holds configuration details and a GAPIC client
+ object.
+ path (Tuple[str, ...]): The components in a document path.
+
+ Returns:
+ str: The fully-qualified document path.
+ """
+ parts = (client._database_string, "documents") + path
+ return _helpers.DOCUMENT_PATH_DELIMITER.join(parts)
+
+
+def _consume_single_get(response_iterator) -> firestore.BatchGetDocumentsResponse:
+ """Consume a gRPC stream that should contain a single response.
+
+ The stream will correspond to a ``BatchGetDocuments`` request made
+ for a single document.
+
+ Args:
+ response_iterator (~google.cloud.exceptions.GrpcRendezvous): A
+ streaming iterator returned from a ``BatchGetDocuments``
+ request.
+
+ Returns:
+ ~google.cloud.proto.firestore.v1.\
+ firestore.BatchGetDocumentsResponse: The single "get"
+ response in the batch.
+
+ Raises:
+ ValueError: If anything other than exactly one response is returned.
+ """
+ # Calling ``list()`` consumes the entire iterator.
+ all_responses = list(response_iterator)
+ if len(all_responses) != 1:
+ raise ValueError(
+ "Unexpected response from `BatchGetDocumentsResponse`",
+ all_responses,
+ "Expected only one result",
+ )
+
+ return all_responses[0]
+
+
+def _first_write_result(write_results: list) -> write.WriteResult:
+ """Get first write result from list.
+
+ For cases where ``len(write_results) > 1``, this assumes the writes
+ occurred at the same time (e.g. if an update and transform are sent
+ at the same time).
+
+ Args:
+ write_results (List[google.cloud.proto.firestore.v1.\
+            write.WriteResult, ...]): The write results from a
+ ``CommitResponse``.
+
+ Returns:
+ google.cloud.firestore_v1.types.WriteResult: The
+ lone write result from ``write_results``.
+
+ Raises:
+ ValueError: If there are zero write results. This is likely to
+ **never** occur, since the backend should be stable.
+ """
+ if not write_results:
+ raise ValueError("Expected at least one write result")
+
+ return write_results[0]
diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py
new file mode 100644
index 0000000000..6e06719078
--- /dev/null
+++ b/google/cloud/firestore_v1/base_query.py
@@ -0,0 +1,1208 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing queries for the Google Cloud Firestore API.
+
+A :class:`~google.cloud.firestore_v1.query.Query` can be created directly from
+a :class:`~google.cloud.firestore_v1.collection.Collection` and that can be
+a more common way to create a query than direct usage of the constructor.
+"""
+import copy
+import math
+
+from google.api_core import retry as retries # type: ignore
+from google.protobuf import wrappers_pb2
+
+from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1 import document
+from google.cloud.firestore_v1 import field_path as field_path_module
+from google.cloud.firestore_v1 import transforms
+from google.cloud.firestore_v1.types import StructuredQuery
+from google.cloud.firestore_v1.types import query
+from google.cloud.firestore_v1.types import Cursor
+from google.cloud.firestore_v1.types import RunQueryResponse
+from google.cloud.firestore_v1.order import Order
+from typing import Any, Dict, Iterable, NoReturn, Optional, Tuple, Union
+
+# Types needed only for Type Hints
+from google.cloud.firestore_v1.base_document import DocumentSnapshot
+
+_BAD_DIR_STRING: str
+_BAD_OP_NAN_NULL: str
+_BAD_OP_STRING: str
+_COMPARISON_OPERATORS: Dict[str, Any]
+_EQ_OP: str
+_INVALID_CURSOR_TRANSFORM: str
+_INVALID_WHERE_TRANSFORM: str
+_MISMATCH_CURSOR_W_ORDER_BY: str
+_MISSING_ORDER_BY: str
+_NO_ORDERS_FOR_CURSOR: str
+_operator_enum: Any
+
+
+_EQ_OP = "=="
+_operator_enum = StructuredQuery.FieldFilter.Operator
+_COMPARISON_OPERATORS = {
+ "<": _operator_enum.LESS_THAN,
+ "<=": _operator_enum.LESS_THAN_OR_EQUAL,
+ _EQ_OP: _operator_enum.EQUAL,
+ "!=": _operator_enum.NOT_EQUAL,
+ ">=": _operator_enum.GREATER_THAN_OR_EQUAL,
+ ">": _operator_enum.GREATER_THAN,
+ "array_contains": _operator_enum.ARRAY_CONTAINS,
+ "in": _operator_enum.IN,
+ "not-in": _operator_enum.NOT_IN,
+ "array_contains_any": _operator_enum.ARRAY_CONTAINS_ANY,
+}
+_BAD_OP_STRING = "Operator string {!r} is invalid. Valid choices are: {}."
+_BAD_OP_NAN_NULL = 'Only an equality filter ("==") can be used with None or NaN values'
+_INVALID_WHERE_TRANSFORM = "Transforms cannot be used as where values."
+_BAD_DIR_STRING = "Invalid direction {!r}. Must be one of {!r} or {!r}."
+_INVALID_CURSOR_TRANSFORM = "Transforms cannot be used as cursor values."
+_MISSING_ORDER_BY = (
+ 'The "order by" field path {!r} is not present in the cursor data {!r}. '
+ "All fields sent to ``order_by()`` must be present in the fields "
+ "if passed to one of ``start_at()`` / ``start_after()`` / "
+ "``end_before()`` / ``end_at()`` to define a cursor."
+)
+_NO_ORDERS_FOR_CURSOR = (
+ "Attempting to create a cursor with no fields to order on. "
+ "When defining a cursor with one of ``start_at()`` / ``start_after()`` / "
+ "``end_before()`` / ``end_at()``, all fields in the cursor must "
+ "come from fields set in ``order_by()``."
+)
+_MISMATCH_CURSOR_W_ORDER_BY = "The cursor {!r} does not match the order fields {!r}."
+
+
+class BaseQuery(object):
+ """Represents a query to the Firestore API.
+
+ Instances of this class are considered immutable: all methods that
+ would modify an instance instead return a new instance.
+
+ Args:
+ parent (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
+ The collection that this query applies to.
+ projection (Optional[:class:`google.cloud.proto.firestore.v1.\
+ query.StructuredQuery.Projection`]):
+ A projection of document fields to limit the query results to.
+ field_filters (Optional[Tuple[:class:`google.cloud.proto.firestore.v1.\
+ query.StructuredQuery.FieldFilter`, ...]]):
+ The filters to be applied in the query.
+ orders (Optional[Tuple[:class:`google.cloud.proto.firestore.v1.\
+ query.StructuredQuery.Order`, ...]]):
+ The "order by" entries to use in the query.
+ limit (Optional[int]):
+ The maximum number of documents the query is allowed to return.
+ limit_to_last (Optional[bool]):
+ Denotes whether a provided limit is applied to the end of the result set.
+ offset (Optional[int]):
+ The number of results to skip.
+ start_at (Optional[Tuple[dict, bool]]):
+            Two-tuple of:
+
+ * a mapping of fields. Any field that is present in this mapping
+ must also be present in ``orders``
+ * an ``after`` flag
+
+ The fields and the flag combine to form a cursor used as
+ a starting point in a query result set. If the ``after``
+ flag is :data:`True`, the results will start just after any
+ documents which have fields matching the cursor, otherwise
+ any matching documents will be included in the result set.
+ When the query is formed, the document values
+ will be used in the order given by ``orders``.
+ end_at (Optional[Tuple[dict, bool]]):
+ Two-tuple of:
+
+ * a mapping of fields. Any field that is present in this mapping
+ must also be present in ``orders``
+ * a ``before`` flag
+
+ The fields and the flag combine to form a cursor used as
+ an ending point in a query result set. If the ``before``
+ flag is :data:`True`, the results will end just before any
+ documents which have fields matching the cursor, otherwise
+ any matching documents will be included in the result set.
+ When the query is formed, the document values
+ will be used in the order given by ``orders``.
+ all_descendants (Optional[bool]):
+ When false, selects only collections that are immediate children
+ of the `parent` specified in the containing `RunQueryRequest`.
+ When true, selects all descendant collections.
+ """
+
+ ASCENDING = "ASCENDING"
+ """str: Sort query results in ascending order on a field."""
+ DESCENDING = "DESCENDING"
+ """str: Sort query results in descending order on a field."""
+
+ def __init__(
+ self,
+ parent,
+ projection=None,
+ field_filters=(),
+ orders=(),
+ limit=None,
+ limit_to_last=False,
+ offset=None,
+ start_at=None,
+ end_at=None,
+ all_descendants=False,
+ ) -> None:
+ self._parent = parent
+ self._projection = projection
+ self._field_filters = field_filters
+ self._orders = orders
+ self._limit = limit
+ self._limit_to_last = limit_to_last
+ self._offset = offset
+ self._start_at = start_at
+ self._end_at = end_at
+ self._all_descendants = all_descendants
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return (
+ self._parent == other._parent
+ and self._projection == other._projection
+ and self._field_filters == other._field_filters
+ and self._orders == other._orders
+ and self._limit == other._limit
+ and self._limit_to_last == other._limit_to_last
+ and self._offset == other._offset
+ and self._start_at == other._start_at
+ and self._end_at == other._end_at
+ and self._all_descendants == other._all_descendants
+ )
+
+ @property
+ def _client(self):
+ """The client of the parent collection.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.client.Client`:
+ The client that owns this query.
+ """
+ return self._parent._client
+
+ def select(self, field_paths: Iterable[str]) -> "BaseQuery":
+ """Project documents matching query to a limited set of fields.
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ If the current query already has a projection set (i.e. has already
+ called :meth:`~google.cloud.firestore_v1.query.Query.select`), this
+ will overwrite it.
+
+ Args:
+ field_paths (Iterable[str, ...]): An iterable of field paths
+ (``.``-delimited list of field names) to use as a projection
+ of document fields in the query results.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A "projected" query. Acts as a copy of the current query,
+ modified with the newly added projection.
+ Raises:
+ ValueError: If any ``field_path`` is invalid.
+ """
+ field_paths = list(field_paths)
+ for field_path in field_paths:
+ field_path_module.split_field_path(field_path) # raises
+
+ new_projection = query.StructuredQuery.Projection(
+ fields=[
+ query.StructuredQuery.FieldReference(field_path=field_path)
+ for field_path in field_paths
+ ]
+ )
+ return self.__class__(
+ self._parent,
+ projection=new_projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=self._limit,
+ limit_to_last=self._limit_to_last,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ def where(self, field_path: str, op_string: str, value) -> "BaseQuery":
+ """Filter the query on a field.
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ Returns a new :class:`~google.cloud.firestore_v1.query.Query` that
+ filters on a specific field path, according to an operation (e.g.
+ ``==`` or "equals") and a particular value to be paired with that
+ operation.
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) for the field to filter on.
+ op_string (str): A comparison operation in the form of a string.
+ Acceptable values are ``<``, ``<=``, ``==``, ``!=``, ``>=``, ``>``,
+ ``in``, ``not-in``, ``array_contains`` and ``array_contains_any``.
+ value (Any): The value to compare the field against in the filter.
+ If ``value`` is :data:`None` or a NaN, then ``==`` is the only
+ allowed operation.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A filtered query. Acts as a copy of the current query,
+ modified with the newly added filter.
+
+ Raises:
+ ValueError: If ``field_path`` is invalid.
+ ValueError: If ``value`` is a NaN or :data:`None` and
+ ``op_string`` is not ``==``.
+ """
+ field_path_module.split_field_path(field_path) # raises
+
+ if value is None:
+ if op_string != _EQ_OP:
+ raise ValueError(_BAD_OP_NAN_NULL)
+ filter_pb = query.StructuredQuery.UnaryFilter(
+ field=query.StructuredQuery.FieldReference(field_path=field_path),
+ op=StructuredQuery.UnaryFilter.Operator.IS_NULL,
+ )
+ elif _isnan(value):
+ if op_string != _EQ_OP:
+ raise ValueError(_BAD_OP_NAN_NULL)
+ filter_pb = query.StructuredQuery.UnaryFilter(
+ field=query.StructuredQuery.FieldReference(field_path=field_path),
+ op=StructuredQuery.UnaryFilter.Operator.IS_NAN,
+ )
+ elif isinstance(value, (transforms.Sentinel, transforms._ValueList)):
+ raise ValueError(_INVALID_WHERE_TRANSFORM)
+ else:
+ filter_pb = query.StructuredQuery.FieldFilter(
+ field=query.StructuredQuery.FieldReference(field_path=field_path),
+ op=_enum_from_op_string(op_string),
+ value=_helpers.encode_value(value),
+ )
+
+ new_filters = self._field_filters + (filter_pb,)
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=new_filters,
+ orders=self._orders,
+ limit=self._limit,
+ offset=self._offset,
+ limit_to_last=self._limit_to_last,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ @staticmethod
+ def _make_order(field_path, direction) -> StructuredQuery.Order:
+ """Helper for :meth:`order_by`."""
+ return query.StructuredQuery.Order(
+ field=query.StructuredQuery.FieldReference(field_path=field_path),
+ direction=_enum_from_direction(direction),
+ )
+
+ def order_by(self, field_path: str, direction: str = ASCENDING) -> "BaseQuery":
+ """Modify the query to add an order clause on a specific field.
+
+ See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ more information on **field paths**.
+
+ Successive :meth:`~google.cloud.firestore_v1.query.Query.order_by`
+ calls will further refine the ordering of results returned by the query
+ (i.e. the new "order by" fields will be added to existing ones).
+
+ Args:
+ field_path (str): A field path (``.``-delimited list of
+ field names) on which to order the query results.
+ direction (Optional[str]): The direction to order by. Must be one
+ of :attr:`ASCENDING` or :attr:`DESCENDING`, defaults to
+ :attr:`ASCENDING`.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ An ordered query. Acts as a copy of the current query, modified
+ with the newly added "order by" constraint.
+
+ Raises:
+ ValueError: If ``field_path`` is invalid.
+ ValueError: If ``direction`` is not one of :attr:`ASCENDING` or
+ :attr:`DESCENDING`.
+ """
+ field_path_module.split_field_path(field_path) # raises
+
+ order_pb = self._make_order(field_path, direction)
+
+ new_orders = self._orders + (order_pb,)
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=new_orders,
+ limit=self._limit,
+ limit_to_last=self._limit_to_last,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ def limit(self, count: int) -> "BaseQuery":
+ """Limit a query to return at most `count` matching results.
+
+ If the current query already has a `limit` set, this will override it.
+ .. note::
+ `limit` and `limit_to_last` are mutually exclusive.
+ Setting `limit` will drop previously set `limit_to_last`.
+ Args:
+ count (int): Maximum number of documents to return that match
+ the query.
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A limited query. Acts as a copy of the current query, modified
+ with the newly added "limit" filter.
+ """
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=count,
+ limit_to_last=False,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ def limit_to_last(self, count: int) -> "BaseQuery":
+ """Limit a query to return the last `count` matching results.
+ If the current query already has a `limit_to_last`
+ set, this will override it.
+ .. note::
+ `limit` and `limit_to_last` are mutually exclusive.
+ Setting `limit_to_last` will drop previously set `limit`.
+ Args:
+ count (int): Maximum number of documents to return that match
+ the query.
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A limited query. Acts as a copy of the current query, modified
+ with the newly added "limit" filter.
+ """
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=count,
+ limit_to_last=True,
+ offset=self._offset,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ def offset(self, num_to_skip: int) -> "BaseQuery":
+ """Skip to an offset in a query.
+
+ If the current query already has specified an offset, this will
+ overwrite it.
+
+ Args:
+ num_to_skip (int): The number of results to skip at the beginning
+ of query results. (Must be non-negative.)
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ An offset query. Acts as a copy of the current query, modified
+ with the newly added "offset" field.
+ """
+ return self.__class__(
+ self._parent,
+ projection=self._projection,
+ field_filters=self._field_filters,
+ orders=self._orders,
+ limit=self._limit,
+ limit_to_last=self._limit_to_last,
+ offset=num_to_skip,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+
+ def _check_snapshot(self, document_snapshot) -> None:
+ """Validate local snapshots for non-collection-group queries.
+
+ Raises:
+ ValueError: for non-collection-group queries, if the snapshot
+ is from a different collection.
+ """
+ if self._all_descendants:
+ return
+
+ if document_snapshot.reference._path[:-1] != self._parent._path:
+ raise ValueError("Cannot use snapshot from another collection as a cursor.")
+
+ def _cursor_helper(
+ self,
+ document_fields_or_snapshot: Union[DocumentSnapshot, dict, list, tuple],
+ before: bool,
+ start: bool,
+ ) -> "BaseQuery":
+ """Set values to be used for a ``start_at`` or ``end_at`` cursor.
+
+ The values will later be used in a query protobuf.
+
+ When the query is sent to the server, the ``document_fields_or_snapshot`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+
+ Args:
+ document_fields_or_snapshot
+ (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
+ a document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+ before (bool): Flag indicating if the document in
+ ``document_fields_or_snapshot`` should (:data:`False`) or
+ shouldn't (:data:`True`) be included in the result set.
+ start (Optional[bool]): determines if the cursor is a ``start_at``
+ cursor (:data:`True`) or an ``end_at`` cursor (:data:`False`).
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor. Acts as a copy of the current query, modified
+ with the newly added "start at" cursor.
+ """
+ if isinstance(document_fields_or_snapshot, tuple):
+ document_fields_or_snapshot = list(document_fields_or_snapshot)
+ elif isinstance(document_fields_or_snapshot, document.DocumentSnapshot):
+ self._check_snapshot(document_fields_or_snapshot)
+ else:
+ # NOTE: We copy so that the caller can't modify after calling.
+ document_fields_or_snapshot = copy.deepcopy(document_fields_or_snapshot)
+
+ cursor_pair = document_fields_or_snapshot, before
+ query_kwargs = {
+ "projection": self._projection,
+ "field_filters": self._field_filters,
+ "orders": self._orders,
+ "limit": self._limit,
+ "offset": self._offset,
+ "all_descendants": self._all_descendants,
+ }
+ if start:
+ query_kwargs["start_at"] = cursor_pair
+ query_kwargs["end_at"] = self._end_at
+ else:
+ query_kwargs["start_at"] = self._start_at
+ query_kwargs["end_at"] = cursor_pair
+
+ return self.__class__(self._parent, **query_kwargs)
+
+ def start_at(
+ self, document_fields_or_snapshot: Union[DocumentSnapshot, dict, list, tuple]
+ ) -> "BaseQuery":
+ """Start query results at a particular document value.
+
+ The result set will **include** the document specified by
+ ``document_fields_or_snapshot``.
+
+ If the current query already has specified a start cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1.query.Query.start_after` -- this
+ will overwrite it.
+
+ When the query is sent to the server, the ``document_fields`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+
+ Args:
+ document_fields_or_snapshot
+ (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
+ a document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor. Acts as
+ a copy of the current query, modified with the newly added
+ "start at" cursor.
+ """
+ return self._cursor_helper(document_fields_or_snapshot, before=True, start=True)
+
+ def start_after(
+ self, document_fields_or_snapshot: Union[DocumentSnapshot, dict, list, tuple]
+ ) -> "BaseQuery":
+ """Start query results after a particular document value.
+
+ The result set will **exclude** the document specified by
+ ``document_fields_or_snapshot``.
+
+ If the current query already has specified a start cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1.query.Query.start_at` -- this will
+ overwrite it.
+
+ When the query is sent to the server, the ``document_fields_or_snapshot`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+
+ Args:
+ document_fields_or_snapshot
+ (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
+ a document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor. Acts as a copy of the current query, modified
+ with the newly added "start after" cursor.
+ """
+ return self._cursor_helper(
+ document_fields_or_snapshot, before=False, start=True
+ )
+
+ def end_before(
+ self, document_fields_or_snapshot: Union[DocumentSnapshot, dict, list, tuple]
+ ) -> "BaseQuery":
+ """End query results before a particular document value.
+
+ The result set will **exclude** the document specified by
+ ``document_fields_or_snapshot``.
+
+ If the current query already has specified an end cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1.query.Query.end_at` -- this will
+ overwrite it.
+
+ When the query is sent to the server, the ``document_fields_or_snapshot`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+
+ Args:
+ document_fields_or_snapshot
+ (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
+ a document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor. Acts as a copy of the current query, modified
+ with the newly added "end before" cursor.
+ """
+ return self._cursor_helper(
+ document_fields_or_snapshot, before=True, start=False
+ )
+
+ def end_at(
+ self, document_fields_or_snapshot: Union[DocumentSnapshot, dict, list, tuple]
+ ) -> "BaseQuery":
+ """End query results at a particular document value.
+
+ The result set will **include** the document specified by
+ ``document_fields_or_snapshot``.
+
+ If the current query already has specified an end cursor -- either
+ via this method or
+ :meth:`~google.cloud.firestore_v1.query.Query.end_before` -- this will
+ overwrite it.
+
+ When the query is sent to the server, the ``document_fields_or_snapshot`` will
+ be used in the order given by fields set by
+ :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+
+ Args:
+ document_fields_or_snapshot
+ (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
+ a document snapshot or a dictionary/list/tuple of fields
+ representing a query results cursor. A cursor is a collection
+ of values that represent a position in a query result set.
+
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.Query`:
+ A query with cursor. Acts as a copy of the current query, modified
+ with the newly added "end at" cursor.
+ """
+ return self._cursor_helper(
+ document_fields_or_snapshot, before=False, start=False
+ )
+
+ def _filters_pb(self) -> StructuredQuery.Filter:
+ """Convert all the filters into a single generic Filter protobuf.
+
+ This may be a lone field filter or unary filter, may be a composite
+ filter or may be :data:`None`.
+
+ Returns:
+ :class:`google.cloud.firestore_v1.types.StructuredQuery.Filter`:
+ A "generic" filter representing the current query's filters.
+ """
+ num_filters = len(self._field_filters)
+ if num_filters == 0:
+ return None
+ elif num_filters == 1:
+ return _filter_pb(self._field_filters[0])
+ else:
+ composite_filter = query.StructuredQuery.CompositeFilter(
+ op=StructuredQuery.CompositeFilter.Operator.AND,
+ filters=[_filter_pb(filter_) for filter_ in self._field_filters],
+ )
+ return query.StructuredQuery.Filter(composite_filter=composite_filter)
+
+ @staticmethod
+ def _normalize_projection(projection) -> StructuredQuery.Projection:
+ """Helper: convert field paths to message."""
+ if projection is not None:
+
+ fields = list(projection.fields)
+
+ if not fields:
+ field_ref = query.StructuredQuery.FieldReference(field_path="__name__")
+ return query.StructuredQuery.Projection(fields=[field_ref])
+
+ return projection
+
+ def _normalize_orders(self) -> list:
+ """Helper: adjust orders based on cursors, where clauses."""
+ orders = list(self._orders)
+ _has_snapshot_cursor = False
+
+ if self._start_at:
+ if isinstance(self._start_at[0], document.DocumentSnapshot):
+ _has_snapshot_cursor = True
+
+ if self._end_at:
+ if isinstance(self._end_at[0], document.DocumentSnapshot):
+ _has_snapshot_cursor = True
+
+ if _has_snapshot_cursor:
+ should_order = [
+ _enum_from_op_string(key)
+ for key in _COMPARISON_OPERATORS
+ if key not in (_EQ_OP, "array_contains")
+ ]
+ order_keys = [order.field.field_path for order in orders]
+ for filter_ in self._field_filters:
+ field = filter_.field.field_path
+ if filter_.op in should_order and field not in order_keys:
+ orders.append(self._make_order(field, "ASCENDING"))
+ if not orders:
+ orders.append(self._make_order("__name__", "ASCENDING"))
+ else:
+ order_keys = [order.field.field_path for order in orders]
+ if "__name__" not in order_keys:
+                direction = orders[-1].direction  # enum value of last order
+ orders.append(self._make_order("__name__", direction))
+
+ return orders
+
+ def _normalize_cursor(self, cursor, orders) -> Optional[Tuple[Any, Any]]:
+ """Helper: convert cursor to a list of values based on orders."""
+ if cursor is None:
+ return
+
+ if not orders:
+ raise ValueError(_NO_ORDERS_FOR_CURSOR)
+
+ document_fields, before = cursor
+
+ order_keys = [order.field.field_path for order in orders]
+
+ if isinstance(document_fields, document.DocumentSnapshot):
+ snapshot = document_fields
+ document_fields = snapshot.to_dict()
+ document_fields["__name__"] = snapshot.reference
+
+ if isinstance(document_fields, dict):
+ # Transform to list using orders
+ values = []
+ data = document_fields
+ for order_key in order_keys:
+ try:
+ if order_key in data:
+ values.append(data[order_key])
+ else:
+ values.append(
+ field_path_module.get_nested_value(order_key, data)
+ )
+ except KeyError:
+ msg = _MISSING_ORDER_BY.format(order_key, data)
+ raise ValueError(msg)
+ document_fields = values
+
+ if len(document_fields) != len(orders):
+ msg = _MISMATCH_CURSOR_W_ORDER_BY.format(document_fields, order_keys)
+ raise ValueError(msg)
+
+ _transform_bases = (transforms.Sentinel, transforms._ValueList)
+
+ for index, key_field in enumerate(zip(order_keys, document_fields)):
+ key, field = key_field
+
+ if isinstance(field, _transform_bases):
+ msg = _INVALID_CURSOR_TRANSFORM
+ raise ValueError(msg)
+
+ if key == "__name__" and isinstance(field, str):
+ document_fields[index] = self._parent.document(field)
+
+ return document_fields, before
+
+ def _to_protobuf(self) -> StructuredQuery:
+ """Convert the current query into the equivalent protobuf.
+
+ Returns:
+ :class:`google.cloud.firestore_v1.types.StructuredQuery`:
+ The query protobuf.
+ """
+ projection = self._normalize_projection(self._projection)
+ orders = self._normalize_orders()
+ start_at = self._normalize_cursor(self._start_at, orders)
+ end_at = self._normalize_cursor(self._end_at, orders)
+
+ query_kwargs = {
+ "select": projection,
+ "from_": [
+ query.StructuredQuery.CollectionSelector(
+ collection_id=self._parent.id, all_descendants=self._all_descendants
+ )
+ ],
+ "where": self._filters_pb(),
+ "order_by": orders,
+ "start_at": _cursor_pb(start_at),
+ "end_at": _cursor_pb(end_at),
+ }
+ if self._offset is not None:
+ query_kwargs["offset"] = self._offset
+ if self._limit is not None:
+ query_kwargs["limit"] = wrappers_pb2.Int32Value(value=self._limit)
+
+ return query.StructuredQuery(**query_kwargs)
+
+ def get(
+ self, transaction=None, retry: retries.Retry = None, timeout: float = None,
+ ) -> NoReturn:
+ raise NotImplementedError
+
+ def _prep_stream(
+ self, transaction=None, retry: retries.Retry = None, timeout: float = None,
+ ) -> Tuple[dict, str, dict]:
+ """Shared setup for async / sync :meth:`stream`"""
+ if self._limit_to_last:
+ raise ValueError(
+ "Query results for queries that include limit_to_last() "
+ "constraints cannot be streamed. Use Query.get() instead."
+ )
+
+ parent_path, expected_prefix = self._parent._parent_info()
+ request = {
+ "parent": parent_path,
+ "structured_query": self._to_protobuf(),
+ "transaction": _helpers.get_transaction_id(transaction),
+ }
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return request, expected_prefix, kwargs
+
+ def stream(
+ self, transaction=None, retry: retries.Retry = None, timeout: float = None,
+ ) -> NoReturn:
+ raise NotImplementedError
+
+ def on_snapshot(self, callback) -> NoReturn:
+ raise NotImplementedError
+
+ def _comparator(self, doc1, doc2) -> int:
+ _orders = self._orders
+
+ # Add implicit sorting by name, using the last specified direction.
+ if len(_orders) == 0:
+ lastDirection = BaseQuery.ASCENDING
+ else:
+ if _orders[-1].direction == 1:
+ lastDirection = BaseQuery.ASCENDING
+ else:
+ lastDirection = BaseQuery.DESCENDING
+
+ orderBys = list(_orders)
+
+ order_pb = query.StructuredQuery.Order(
+ field=query.StructuredQuery.FieldReference(field_path="id"),
+ direction=_enum_from_direction(lastDirection),
+ )
+ orderBys.append(order_pb)
+
+ for orderBy in orderBys:
+ if orderBy.field.field_path == "id":
+                # If ordering by document id, compare resource paths.
+ comp = Order()._compare_to(doc1.reference._path, doc2.reference._path)
+ else:
+ if (
+ orderBy.field.field_path not in doc1._data
+ or orderBy.field.field_path not in doc2._data
+ ):
+ raise ValueError(
+ "Can only compare fields that exist in the "
+ "DocumentSnapshot. Please include the fields you are "
+ "ordering on in your select() call."
+ )
+ v1 = doc1._data[orderBy.field.field_path]
+ v2 = doc2._data[orderBy.field.field_path]
+ encoded_v1 = _helpers.encode_value(v1)
+ encoded_v2 = _helpers.encode_value(v2)
+ comp = Order().compare(encoded_v1, encoded_v2)
+
+ if comp != 0:
+ # 1 == Ascending, -1 == Descending
+ return orderBy.direction * comp
+
+ return 0
+
+
+def _enum_from_op_string(op_string: str) -> int:
+ """Convert a string representation of a binary operator to an enum.
+
+ These enums come from the protobuf message definition
+ ``StructuredQuery.FieldFilter.Operator``.
+
+ Args:
+        op_string (str): A comparison operation in the form of a string.
+            Acceptable values are ``<``, ``<=``, ``==``, ``!=``, ``>=``, ``>``,
+            ``in``, ``not-in``, ``array_contains`` and ``array_contains_any``.
+
+ Returns:
+ int: The enum corresponding to ``op_string``.
+
+ Raises:
+ ValueError: If ``op_string`` is not a valid operator.
+ """
+ try:
+ return _COMPARISON_OPERATORS[op_string]
+ except KeyError:
+ choices = ", ".join(sorted(_COMPARISON_OPERATORS.keys()))
+ msg = _BAD_OP_STRING.format(op_string, choices)
+ raise ValueError(msg)
+
+
+def _isnan(value) -> bool:
+ """Check if a value is NaN.
+
+ This differs from ``math.isnan`` in that **any** input type is
+ allowed.
+
+ Args:
+ value (Any): A value to check for NaN-ness.
+
+ Returns:
+ bool: Indicates if the value is the NaN float.
+ """
+ if isinstance(value, float):
+ return math.isnan(value)
+ else:
+ return False
+
+
+def _enum_from_direction(direction: str) -> int:
+ """Convert a string representation of a direction to an enum.
+
+ Args:
+ direction (str): A direction to order by. Must be one of
+ :attr:`~google.cloud.firestore.BaseQuery.ASCENDING` or
+ :attr:`~google.cloud.firestore.BaseQuery.DESCENDING`.
+
+ Returns:
+ int: The enum corresponding to ``direction``.
+
+ Raises:
+ ValueError: If ``direction`` is not a valid direction.
+ """
+ if isinstance(direction, int):
+ return direction
+
+ if direction == BaseQuery.ASCENDING:
+ return StructuredQuery.Direction.ASCENDING
+ elif direction == BaseQuery.DESCENDING:
+ return StructuredQuery.Direction.DESCENDING
+ else:
+ msg = _BAD_DIR_STRING.format(
+ direction, BaseQuery.ASCENDING, BaseQuery.DESCENDING
+ )
+ raise ValueError(msg)
+
+
+def _filter_pb(field_or_unary) -> StructuredQuery.Filter:
+ """Convert a specific protobuf filter to the generic filter type.
+
+ Args:
+ field_or_unary (Union[google.cloud.proto.firestore.v1.\
+ query.StructuredQuery.FieldFilter, google.cloud.proto.\
+            firestore.v1.query.StructuredQuery.UnaryFilter]): A
+ field or unary filter to convert to a generic filter.
+
+ Returns:
+ google.cloud.firestore_v1.types.\
+ StructuredQuery.Filter: A "generic" filter.
+
+ Raises:
+ ValueError: If ``field_or_unary`` is not a field or unary filter.
+ """
+ if isinstance(field_or_unary, query.StructuredQuery.FieldFilter):
+ return query.StructuredQuery.Filter(field_filter=field_or_unary)
+ elif isinstance(field_or_unary, query.StructuredQuery.UnaryFilter):
+ return query.StructuredQuery.Filter(unary_filter=field_or_unary)
+ else:
+ raise ValueError("Unexpected filter type", type(field_or_unary), field_or_unary)
+
+
+def _cursor_pb(cursor_pair: Tuple[list, bool]) -> Optional[Cursor]:
+ """Convert a cursor pair to a protobuf.
+
+ If ``cursor_pair`` is :data:`None`, just returns :data:`None`.
+
+ Args:
+ cursor_pair (Optional[Tuple[list, bool]]): Two-tuple of
+
+ * a list of field values.
+ * a ``before`` flag
+
+ Returns:
+ Optional[google.cloud.firestore_v1.types.Cursor]: A
+ protobuf cursor corresponding to the values.
+ """
+ if cursor_pair is not None:
+ data, before = cursor_pair
+ value_pbs = [_helpers.encode_value(value) for value in data]
+ return query.Cursor(values=value_pbs, before=before)
+
+
+def _query_response_to_snapshot(
+ response_pb: RunQueryResponse, collection, expected_prefix: str
+) -> Optional[document.DocumentSnapshot]:
+ """Parse a query response protobuf to a document snapshot.
+
+ Args:
+ response_pb (google.cloud.proto.firestore.v1.\
+            firestore.RunQueryResponse): A response protobuf from a ``RunQuery`` stream.
+ collection (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
+ A reference to the collection that initiated the query.
+ expected_prefix (str): The expected prefix for fully-qualified
+ document names returned in the query results. This can be computed
+ directly from ``collection`` via :meth:`_parent_info`.
+
+ Returns:
+ Optional[:class:`~google.cloud.firestore.document.DocumentSnapshot`]:
+ A snapshot of the data returned in the query. If
+ ``response_pb.document`` is not set, the snapshot will be :data:`None`.
+ """
+ if not response_pb._pb.HasField("document"):
+ return None
+
+ document_id = _helpers.get_doc_id(response_pb.document, expected_prefix)
+ reference = collection.document(document_id)
+ data = _helpers.decode_dict(response_pb.document.fields, collection._client)
+ snapshot = document.DocumentSnapshot(
+ reference,
+ data,
+ exists=True,
+ read_time=response_pb.read_time,
+ create_time=response_pb.document.create_time,
+ update_time=response_pb.document.update_time,
+ )
+ return snapshot
+
+
+def _collection_group_query_response_to_snapshot(
+ response_pb: RunQueryResponse, collection
+) -> Optional[document.DocumentSnapshot]:
+ """Parse a query response protobuf to a document snapshot.
+
+ Args:
+ response_pb (google.cloud.proto.firestore.v1.\
+            firestore.RunQueryResponse): A response protobuf from a ``RunQuery`` stream.
+ collection (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
+ A reference to the collection that initiated the query.
+
+ Returns:
+ Optional[:class:`~google.cloud.firestore.document.DocumentSnapshot`]:
+ A snapshot of the data returned in the query. If
+ ``response_pb.document`` is not set, the snapshot will be :data:`None`.
+ """
+ if not response_pb._pb.HasField("document"):
+ return None
+ reference = collection._client.document(response_pb.document.name)
+ data = _helpers.decode_dict(response_pb.document.fields, collection._client)
+ snapshot = document.DocumentSnapshot(
+ reference,
+ data,
+ exists=True,
+ read_time=response_pb._pb.read_time,
+ create_time=response_pb._pb.document.create_time,
+ update_time=response_pb._pb.document.update_time,
+ )
+ return snapshot
+
+
+class BaseCollectionGroup(BaseQuery):
+ """Represents a Collection Group in the Firestore API.
+
+ This is a specialization of :class:`.Query` that includes all documents in the
+ database that are contained in a collection or subcollection of the given
+ parent.
+
+ Args:
+ parent (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
+ The collection that this query applies to.
+ """
+
+ _PARTITION_QUERY_ORDER = (
+ BaseQuery._make_order(
+ field_path_module.FieldPath.document_id(), BaseQuery.ASCENDING,
+ ),
+ )
+
+ def __init__(
+ self,
+ parent,
+ projection=None,
+ field_filters=(),
+ orders=(),
+ limit=None,
+ limit_to_last=False,
+ offset=None,
+ start_at=None,
+ end_at=None,
+ all_descendants=True,
+ ) -> None:
+ if not all_descendants:
+ raise ValueError("all_descendants must be True for collection group query.")
+
+ super(BaseCollectionGroup, self).__init__(
+ parent=parent,
+ projection=projection,
+ field_filters=field_filters,
+ orders=orders,
+ limit=limit,
+ limit_to_last=limit_to_last,
+ offset=offset,
+ start_at=start_at,
+ end_at=end_at,
+ all_descendants=all_descendants,
+ )
+
+ def _validate_partition_query(self):
+ if self._field_filters:
+ raise ValueError("Can't partition query with filters.")
+
+ if self._projection:
+ raise ValueError("Can't partition query with projection.")
+
+ if self._limit:
+ raise ValueError("Can't partition query with limit.")
+
+ if self._offset:
+ raise ValueError("Can't partition query with offset.")
+
+ def _get_query_class(self):
+ raise NotImplementedError
+
+ def _prep_get_partitions(
+ self, partition_count, retry: retries.Retry = None, timeout: float = None,
+ ) -> Tuple[dict, dict]:
+ self._validate_partition_query()
+ parent_path, expected_prefix = self._parent._parent_info()
+ klass = self._get_query_class()
+ query = klass(
+ self._parent,
+ orders=self._PARTITION_QUERY_ORDER,
+ start_at=self._start_at,
+ end_at=self._end_at,
+ all_descendants=self._all_descendants,
+ )
+ request = {
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "partition_count": partition_count,
+ }
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ return request, kwargs
+
+ def get_partitions(
+ self, partition_count, retry: retries.Retry = None, timeout: float = None,
+ ) -> NoReturn:
+ raise NotImplementedError
+
+
+class QueryPartition:
+ """Represents a bounded partition of a collection group query.
+
+ Contains cursors that can be used in a query as a starting and/or end point for the
+ collection group query. The cursors may only be used in a query that matches the
+ constraints of the query that produced this partition.
+
+ Args:
+ query (BaseQuery): The original query that this is a partition of.
+ start_at (Optional[~google.cloud.firestore_v1.document.DocumentSnapshot]):
+ Cursor for first query result to include. If `None`, the partition starts at
+ the beginning of the result set.
+ end_at (Optional[~google.cloud.firestore_v1.document.DocumentSnapshot]):
+ Cursor for first query result after the last result included in the
+ partition. If `None`, the partition runs to the end of the result set.
+
+ """
+
+ def __init__(self, query, start_at, end_at):
+ self._query = query
+ self._start_at = start_at
+ self._end_at = end_at
+
+ @property
+ def start_at(self):
+ return self._start_at
+
+ @property
+ def end_at(self):
+ return self._end_at
+
+ def query(self):
+ """Generate a new query using this partition's bounds.
+
+ Returns:
+ BaseQuery: Copy of the original query with start and end bounds set by the
+ cursors from this partition.
+ """
+ query = self._query
+ start_at = ([self.start_at], True) if self.start_at else None
+ end_at = ([self.end_at], True) if self.end_at else None
+
+ return type(query)(
+ query._parent,
+ all_descendants=query._all_descendants,
+ orders=query._PARTITION_QUERY_ORDER,
+ start_at=start_at,
+ end_at=end_at,
+ )
diff --git a/google/cloud/firestore_v1/base_transaction.py b/google/cloud/firestore_v1/base_transaction.py
new file mode 100644
index 0000000000..5eac1d7fe6
--- /dev/null
+++ b/google/cloud/firestore_v1/base_transaction.py
@@ -0,0 +1,186 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for applying Google Cloud Firestore changes in a transaction."""
+
+from google.api_core import retry as retries # type: ignore
+
+from google.cloud.firestore_v1 import types
+from typing import Any, Coroutine, NoReturn, Optional, Union
+
+_CANT_BEGIN: str
+_CANT_COMMIT: str
+_CANT_RETRY_READ_ONLY: str
+_CANT_ROLLBACK: str
+_EXCEED_ATTEMPTS_TEMPLATE: str
+_INITIAL_SLEEP: float
+_MAX_SLEEP: float
+_MISSING_ID_TEMPLATE: str
+_MULTIPLIER: float
+_WRITE_READ_ONLY: str
+
+
+MAX_ATTEMPTS = 5
+"""int: Default number of transaction attempts (with retries)."""
+_CANT_BEGIN: str = "The transaction has already begun. Current transaction ID: {!r}."
+_MISSING_ID_TEMPLATE: str = "The transaction has no transaction ID, so it cannot be {}."
+_CANT_ROLLBACK: str = _MISSING_ID_TEMPLATE.format("rolled back")
+_CANT_COMMIT: str = _MISSING_ID_TEMPLATE.format("committed")
+_WRITE_READ_ONLY: str = "Cannot perform write operation in read-only transaction."
+_INITIAL_SLEEP: float = 1.0
+"""float: Initial "max" for sleep interval. To be used in :func:`_sleep`."""
+_MAX_SLEEP: float = 30.0
+"""float: Eventual "max" sleep time. To be used in :func:`_sleep`."""
+_MULTIPLIER: float = 2.0
+"""float: Multiplier for exponential backoff. To be used in :func:`_sleep`."""
+_EXCEED_ATTEMPTS_TEMPLATE: str = "Failed to commit transaction in {:d} attempts."
+_CANT_RETRY_READ_ONLY: str = "Only read-write transactions can be retried."
+
+
+class BaseTransaction(object):
+ """Accumulate read-and-write operations to be sent in a transaction.
+
+ Args:
+ max_attempts (Optional[int]): The maximum number of attempts for
+ the transaction (i.e. allowing retries). Defaults to
+ :attr:`~google.cloud.firestore_v1.transaction.MAX_ATTEMPTS`.
+ read_only (Optional[bool]): Flag indicating if the transaction
+ should be read-only or should allow writes. Defaults to
+ :data:`False`.
+ """
+
+ def __init__(self, max_attempts=MAX_ATTEMPTS, read_only=False) -> None:
+ self._max_attempts = max_attempts
+ self._read_only = read_only
+ self._id = None
+
+ def _add_write_pbs(self, write_pbs) -> NoReturn:
+ raise NotImplementedError
+
+ def _options_protobuf(
+ self, retry_id: Union[bytes, None]
+ ) -> Optional[types.common.TransactionOptions]:
+ """Convert the current object to protobuf.
+
+ The ``retry_id`` value is used when retrying a transaction that
+ failed (e.g. due to contention). It is intended to be the "first"
+ transaction that failed (i.e. if multiple retries are needed).
+
+ Args:
+ retry_id (Union[bytes, NoneType]): Transaction ID of a transaction
+ to be retried.
+
+ Returns:
+ Optional[google.cloud.firestore_v1.types.TransactionOptions]:
+ The protobuf ``TransactionOptions`` if ``read_only==True`` or if
+ there is a transaction ID to be retried, else :data:`None`.
+
+ Raises:
+ ValueError: If ``retry_id`` is not :data:`None` but the
+ transaction is read-only.
+ """
+ if retry_id is not None:
+ if self._read_only:
+ raise ValueError(_CANT_RETRY_READ_ONLY)
+
+ return types.TransactionOptions(
+ read_write=types.TransactionOptions.ReadWrite(
+ retry_transaction=retry_id
+ )
+ )
+ elif self._read_only:
+ return types.TransactionOptions(
+ read_only=types.TransactionOptions.ReadOnly()
+ )
+ else:
+ return None
+
+ @property
+ def in_progress(self):
+ """Determine if this transaction has already begun.
+
+ Returns:
+ bool: Indicates if the transaction has started.
+ """
+ return self._id is not None
+
+ @property
+ def id(self):
+ """Get the current transaction ID.
+
+ Returns:
+ Optional[bytes]: The transaction ID (or :data:`None` if the
+ current transaction is not in progress).
+ """
+ return self._id
+
+ def _clean_up(self) -> None:
+        """Clean up the instance after :meth:`_rollback` or :meth:`_commit`.
+
+        This is intended to occur on success or failure of the associated RPCs.
+ """
+ self._write_pbs = []
+ self._id = None
+
+ def _begin(self, retry_id=None) -> NoReturn:
+ raise NotImplementedError
+
+ def _rollback(self) -> NoReturn:
+ raise NotImplementedError
+
+ def _commit(self) -> Union[list, Coroutine[Any, Any, list]]:
+ raise NotImplementedError
+
+ def get_all(
+ self, references: list, retry: retries.Retry = None, timeout: float = None,
+ ) -> NoReturn:
+ raise NotImplementedError
+
+ def get(
+ self, ref_or_query, retry: retries.Retry = None, timeout: float = None,
+ ) -> NoReturn:
+ raise NotImplementedError
+
+
+class _BaseTransactional(object):
+    """Provide a callable object to use as a transactional decorator.
+
+ This is surfaced via
+ :func:`~google.cloud.firestore_v1.transaction.transactional`.
+
+ Args:
+ to_wrap (Callable[[:class:`~google.cloud.firestore_v1.transaction.Transaction`, ...], Any]):
+ A callable that should be run (and retried) in a transaction.
+ """
+
+ def __init__(self, to_wrap) -> None:
+ self.to_wrap = to_wrap
+ self.current_id = None
+ """Optional[bytes]: The current transaction ID."""
+ self.retry_id = None
+ """Optional[bytes]: The ID of the first attempted transaction."""
+
+ def _reset(self) -> None:
+ """Unset the transaction IDs."""
+ self.current_id = None
+ self.retry_id = None
+
+ def _pre_commit(self, transaction, *args, **kwargs) -> NoReturn:
+ raise NotImplementedError
+
+ def _maybe_commit(self, transaction) -> NoReturn:
+ raise NotImplementedError
+
+ def __call__(self, transaction, *args, **kwargs):
+ raise NotImplementedError
diff --git a/google/cloud/firestore_v1/batch.py b/google/cloud/firestore_v1/batch.py
index 56483af10c..1758051228 100644
--- a/google/cloud/firestore_v1/batch.py
+++ b/google/cloud/firestore_v1/batch.py
@@ -14,11 +14,13 @@
"""Helpers for batch requests to the Google Cloud Firestore API."""
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
-from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1.base_batch import BaseWriteBatch
-class WriteBatch(object):
+class WriteBatch(BaseWriteBatch):
"""Accumulate write operations to be sent in a batch.
This has the same set of methods for write operations that
@@ -30,126 +32,36 @@ class WriteBatch(object):
The client that created this batch.
"""
- def __init__(self, client):
- self._client = client
- self._write_pbs = []
- self.write_results = None
- self.commit_time = None
-
- def _add_write_pbs(self, write_pbs):
- """Add `Write`` protobufs to this transaction.
-
- This method intended to be over-ridden by subclasses.
-
- Args:
- write_pbs (List[google.cloud.proto.firestore.v1.\
- write_pb2.Write]): A list of write protobufs to be added.
- """
- self._write_pbs.extend(write_pbs)
-
- def create(self, reference, document_data):
- """Add a "change" to this batch to create a document.
-
- If the document given by ``reference`` already exists, then this
- batch will fail when :meth:`commit`-ed.
-
- Args:
- reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
- A document reference to be created in this batch.
- document_data (dict): Property names and values to use for
- creating a document.
- """
- write_pbs = _helpers.pbs_for_create(reference._document_path, document_data)
- self._add_write_pbs(write_pbs)
-
- def set(self, reference, document_data, merge=False):
- """Add a "change" to replace a document.
-
- See
- :meth:`google.cloud.firestore_v1.document.DocumentReference.set` for
- more information on how ``option`` determines how the change is
- applied.
-
- Args:
- reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
- A document reference that will have values set in this batch.
- document_data (dict):
- Property names and values to use for replacing a document.
- merge (Optional[bool] or Optional[List]):
- If True, apply merging instead of overwriting the state
- of the document.
- """
- if merge is not False:
- write_pbs = _helpers.pbs_for_set_with_merge(
- reference._document_path, document_data, merge
- )
- else:
- write_pbs = _helpers.pbs_for_set_no_merge(
- reference._document_path, document_data
- )
-
- self._add_write_pbs(write_pbs)
-
- def update(self, reference, field_updates, option=None):
- """Add a "change" to update a document.
-
- See
- :meth:`google.cloud.firestore_v1.document.DocumentReference.update`
- for more information on ``field_updates`` and ``option``.
-
- Args:
- reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
- A document reference that will be updated in this batch.
- field_updates (dict):
- Field names or paths to update and values to update with.
- option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
- A write option to make assertions / preconditions on the server
- state of the document before applying changes.
- """
- if option.__class__.__name__ == "ExistsOption":
- raise ValueError("you must not pass an explicit write option to " "update.")
- write_pbs = _helpers.pbs_for_update(
- reference._document_path, field_updates, option
- )
- self._add_write_pbs(write_pbs)
+ def __init__(self, client) -> None:
+ super(WriteBatch, self).__init__(client=client)
- def delete(self, reference, option=None):
- """Add a "change" to delete a document.
-
- See
- :meth:`google.cloud.firestore_v1.document.DocumentReference.delete`
- for more information on how ``option`` determines how the change is
- applied.
+ def commit(
+ self, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None
+ ) -> list:
+ """Commit the changes accumulated in this batch.
Args:
- reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
- A document reference that will be deleted in this batch.
- option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
- A write option to make assertions / preconditions on the server
- state of the document before applying changes.
- """
- write_pb = _helpers.pb_for_delete(reference._document_path, option)
- self._add_write_pbs([write_pb])
-
- def commit(self):
- """Commit the changes accumulated in this batch.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Returns:
- List[:class:`google.cloud.proto.firestore.v1.write_pb2.WriteResult`, ...]:
+ List[:class:`google.cloud.proto.firestore.v1.write.WriteResult`, ...]:
The write results corresponding to the changes committed, returned
in the same order as the changes were applied to this batch. A
write result contains an ``update_time`` field.
"""
+ request, kwargs = self._prep_commit(retry, timeout)
+
commit_response = self._client._firestore_api.commit(
- self._client._database_string,
- self._write_pbs,
- transaction=None,
- metadata=self._client._rpc_metadata,
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
)
self._write_pbs = []
self.write_results = results = list(commit_response.write_results)
self.commit_time = commit_response.commit_time
+
return results
def __enter__(self):
diff --git a/google/cloud/firestore_v1/client.py b/google/cloud/firestore_v1/client.py
index da09b9ff44..6ad5f76e64 100644
--- a/google/cloud/firestore_v1/client.py
+++ b/google/cloud/firestore_v1/client.py
@@ -23,41 +23,34 @@
* a :class:`~google.cloud.firestore_v1.client.Client` owns a
:class:`~google.cloud.firestore_v1.document.DocumentReference`
"""
-import os
-import google.api_core.client_options
-from google.api_core.gapic_v1 import client_info
-from google.cloud.client import ClientWithProject
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
-from google.cloud.firestore_v1 import _helpers
-from google.cloud.firestore_v1 import __version__
-from google.cloud.firestore_v1 import query
-from google.cloud.firestore_v1 import types
+from google.cloud.firestore_v1.base_client import (
+ BaseClient,
+ DEFAULT_DATABASE,
+ _CLIENT_INFO,
+ _parse_batch_get,
+ _path_helper,
+)
+
+from google.cloud.firestore_v1.query import CollectionGroup
from google.cloud.firestore_v1.batch import WriteBatch
from google.cloud.firestore_v1.collection import CollectionReference
from google.cloud.firestore_v1.document import DocumentReference
-from google.cloud.firestore_v1.document import DocumentSnapshot
-from google.cloud.firestore_v1.field_path import render_field_path
-from google.cloud.firestore_v1.gapic import firestore_client
-from google.cloud.firestore_v1.gapic.transports import firestore_grpc_transport
from google.cloud.firestore_v1.transaction import Transaction
-
-
-DEFAULT_DATABASE = "(default)"
-"""str: The default database used in a :class:`~google.cloud.firestore_v1.client.Client`."""
-_BAD_OPTION_ERR = (
- "Exactly one of ``last_update_time`` or ``exists`` " "must be provided."
-)
-_BAD_DOC_TEMPLATE = (
- "Document {!r} appeared in response but was not present among references"
+from google.cloud.firestore_v1.services.firestore import client as firestore_client
+from google.cloud.firestore_v1.services.firestore.transports import (
+ grpc as firestore_grpc_transport,
)
-_ACTIVE_TXN = "There is already an active transaction."
-_INACTIVE_TXN = "There is no active transaction."
-_CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__)
-_FIRESTORE_EMULATOR_HOST = "FIRESTORE_EMULATOR_HOST"
+from typing import Any, Generator, Iterable, Tuple
+
+# Types needed only for Type Hints
+from google.cloud.firestore_v1.base_document import DocumentSnapshot
-class Client(ClientWithProject):
+class Client(BaseClient):
"""Client for interacting with Google Cloud Firestore API.
.. note::
@@ -85,16 +78,6 @@ class Client(ClientWithProject):
should be set through client_options.
"""
- SCOPE = (
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/datastore",
- )
- """The scopes required for authenticating with the Firestore service."""
-
- _firestore_api_internal = None
- _database_string_internal = None
- _rpc_metadata_internal = None
-
def __init__(
self,
project=None,
@@ -102,117 +85,39 @@ def __init__(
database=DEFAULT_DATABASE,
client_info=_CLIENT_INFO,
client_options=None,
- ):
- # NOTE: This API has no use for the _http argument, but sending it
- # will have no impact since the _http() @property only lazily
- # creates a working HTTP object.
+ ) -> None:
super(Client, self).__init__(
- project=project, credentials=credentials, _http=None
+ project=project,
+ credentials=credentials,
+ database=database,
+ client_info=client_info,
+ client_options=client_options,
)
- self._client_info = client_info
- if client_options:
- if type(client_options) == dict:
- client_options = google.api_core.client_options.from_dict(
- client_options
- )
- self._client_options = client_options
-
- self._database = database
- self._emulator_host = os.getenv(_FIRESTORE_EMULATOR_HOST)
@property
def _firestore_api(self):
"""Lazy-loading getter GAPIC Firestore API.
-
Returns:
:class:`~google.cloud.gapic.firestore.v1`.firestore_client.FirestoreClient:
- CollectionReference:
"""Get a reference to a collection.
For a top-level collection:
@@ -241,14 +146,9 @@ def collection(self, *collection_path):
:class:`~google.cloud.firestore_v1.collection.CollectionReference`:
A reference to a collection in the Firestore database.
"""
- if len(collection_path) == 1:
- path = collection_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
- else:
- path = collection_path
-
- return CollectionReference(*path, client=self)
+ return CollectionReference(*_path_helper(collection_path), client=self)
- def collection_group(self, collection_id):
+ def collection_group(self, collection_id: str) -> CollectionGroup:
"""
Creates and returns a new Query that includes all documents in the
database that are contained in a collection or subcollection with the
@@ -258,22 +158,19 @@ def collection_group(self, collection_id):
>>> query = client.collection_group('mygroup')
- @param {string} collectionId Identifies the collections to query over.
- Every collection or subcollection with this ID as the last segment of its
- path will be included. Cannot contain a slash.
- @returns {Query} The created Query.
- """
- if "/" in collection_id:
- raise ValueError(
- "Invalid collection_id "
- + collection_id
- + ". Collection IDs must not contain '/'."
- )
+ Args:
+            collection_id (str): Identifies the collections to query over.
+
+ Every collection or subcollection with this ID as the last segment of its
+ path will be included. Cannot contain a slash.
- collection = self.collection(collection_id)
- return query.Query(collection, all_descendants=True)
+ Returns:
+ :class:`~google.cloud.firestore_v1.query.CollectionGroup`:
+ The created Query.
+ """
+ return CollectionGroup(self._get_collection_reference(collection_id))
- def document(self, *document_path):
+ def document(self, *document_path: Tuple[str]) -> DocumentReference:
"""Get a reference to a document in a collection.
For a top-level document:
@@ -304,99 +201,18 @@ def document(self, *document_path):
:class:`~google.cloud.firestore_v1.document.DocumentReference`:
A reference to a document in a collection.
"""
- if len(document_path) == 1:
- path = document_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER)
- else:
- path = document_path
-
- # DocumentReference takes a relative path. Strip the database string if present.
- base_path = self._database_string + "/documents/"
- joined_path = _helpers.DOCUMENT_PATH_DELIMITER.join(path)
- if joined_path.startswith(base_path):
- joined_path = joined_path[len(base_path) :]
- path = joined_path.split(_helpers.DOCUMENT_PATH_DELIMITER)
-
- return DocumentReference(*path, client=self)
-
- @staticmethod
- def field_path(*field_names):
- """Create a **field path** from a list of nested field names.
-
- A **field path** is a ``.``-delimited concatenation of the field
- names. It is used to represent a nested field. For example,
- in the data
-
- .. code-block:: python
-
- data = {
- 'aa': {
- 'bb': {
- 'cc': 10,
- },
- },
- }
-
- the field path ``'aa.bb.cc'`` represents the data stored in
- ``data['aa']['bb']['cc']``.
-
- Args:
- field_names (Tuple[str, ...]): The list of field names.
-
- Returns:
- str: The ``.``-delimited field path.
- """
- return render_field_path(field_names)
-
- @staticmethod
- def write_option(**kwargs):
- """Create a write option for write operations.
-
- Write operations include :meth:`~google.cloud.DocumentReference.set`,
- :meth:`~google.cloud.DocumentReference.update` and
- :meth:`~google.cloud.DocumentReference.delete`.
-
- One of the following keyword arguments must be provided:
-
- * ``last_update_time`` (:class:`google.protobuf.timestamp_pb2.\
- Timestamp`): A timestamp. When set, the target document must
- exist and have been last updated at that time. Protobuf
- ``update_time`` timestamps are typically returned from methods
- that perform write operations as part of a "write result"
- protobuf or directly.
- * ``exists`` (:class:`bool`): Indicates if the document being modified
- should already exist.
-
- Providing no argument would make the option have no effect (so
- it is not allowed). Providing multiple would be an apparent
- contradiction, since ``last_update_time`` assumes that the
- document **was** updated (it can't have been updated if it
- doesn't exist) and ``exists`` indicate that it is unknown if the
- document exists or not.
-
- Args:
- kwargs (Dict[str, Any]): The keyword arguments described above.
-
- Raises:
- TypeError: If anything other than exactly one argument is
- provided by the caller.
+ return DocumentReference(
+ *self._document_path_helper(*document_path), client=self
+ )
- Returns:
- :class:`~google.cloud.firestore_v1.client.WriteOption`:
- The option to be used to configure a write message.
- """
- if len(kwargs) != 1:
- raise TypeError(_BAD_OPTION_ERR)
-
- name, value = kwargs.popitem()
- if name == "last_update_time":
- return _helpers.LastUpdateOption(value)
- elif name == "exists":
- return _helpers.ExistsOption(value)
- else:
- extra = "{!r} was provided".format(name)
- raise TypeError(_BAD_OPTION_ERR, extra)
-
- def get_all(self, references, field_paths=None, transaction=None):
+ def get_all(
+ self,
+ references: list,
+ field_paths: Iterable[str] = None,
+ transaction: Transaction = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Generator[DocumentSnapshot, Any, None]:
"""Retrieve a batch of documents.
.. note::
@@ -426,39 +242,51 @@ def get_all(self, references, field_paths=None, transaction=None):
transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
An existing transaction that these ``references`` will be
retrieved in.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Yields:
.DocumentSnapshot: The next document snapshot that fulfills the
query, or :data:`None` if the document does not exist.
"""
- document_paths, reference_map = _reference_info(references)
- mask = _get_doc_mask(field_paths)
+ request, reference_map, kwargs = self._prep_get_all(
+ references, field_paths, transaction, retry, timeout
+ )
+
response_iterator = self._firestore_api.batch_get_documents(
- self._database_string,
- document_paths,
- mask,
- transaction=_helpers.get_transaction_id(transaction),
- metadata=self._rpc_metadata,
+ request=request, metadata=self._rpc_metadata, **kwargs,
)
for get_doc_response in response_iterator:
yield _parse_batch_get(get_doc_response, reference_map, self)
- def collections(self):
+ def collections(
+ self, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None,
+ ) -> Generator[Any, Any, None]:
"""List top-level collections of the client's database.
+ Args:
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
Returns:
Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]:
iterator of subcollections of the current document.
"""
+ request, kwargs = self._prep_collections(retry, timeout)
+
iterator = self._firestore_api.list_collection_ids(
- "{}/documents".format(self._database_string), metadata=self._rpc_metadata
+ request=request, metadata=self._rpc_metadata, **kwargs,
)
- iterator.client = self
- iterator.item_to_value = _item_to_collection_ref
- return iterator
- def batch(self):
+ for collection_id in iterator:
+ yield self.collection(collection_id)
+
+ def batch(self) -> WriteBatch:
"""Get a batch instance from this client.
Returns:
@@ -468,7 +296,7 @@ def batch(self):
"""
return WriteBatch(self)
- def transaction(self, **kwargs):
+ def transaction(self, **kwargs) -> Transaction:
"""Get a transaction that uses this client.
See :class:`~google.cloud.firestore_v1.transaction.Transaction` for
@@ -485,135 +313,3 @@ def transaction(self, **kwargs):
A transaction attached to this client.
"""
return Transaction(self, **kwargs)
-
-
-def _reference_info(references):
- """Get information about document references.
-
- Helper for :meth:`~google.cloud.firestore_v1.client.Client.get_all`.
-
- Args:
- references (List[.DocumentReference, ...]): Iterable of document
- references.
-
- Returns:
- Tuple[List[str, ...], Dict[str, .DocumentReference]]: A two-tuple of
-
- * fully-qualified documents paths for each reference in ``references``
- * a mapping from the paths to the original reference. (If multiple
- ``references`` contains multiple references to the same document,
- that key will be overwritten in the result.)
- """
- document_paths = []
- reference_map = {}
- for reference in references:
- doc_path = reference._document_path
- document_paths.append(doc_path)
- reference_map[doc_path] = reference
-
- return document_paths, reference_map
-
-
-def _get_reference(document_path, reference_map):
- """Get a document reference from a dictionary.
-
- This just wraps a simple dictionary look-up with a helpful error that is
- specific to :meth:`~google.cloud.firestore.client.Client.get_all`, the
- **public** caller of this function.
-
- Args:
- document_path (str): A fully-qualified document path.
- reference_map (Dict[str, .DocumentReference]): A mapping (produced
- by :func:`_reference_info`) of fully-qualified document paths to
- document references.
-
- Returns:
- .DocumentReference: The matching reference.
-
- Raises:
- ValueError: If ``document_path`` has not been encountered.
- """
- try:
- return reference_map[document_path]
- except KeyError:
- msg = _BAD_DOC_TEMPLATE.format(document_path)
- raise ValueError(msg)
-
-
-def _parse_batch_get(get_doc_response, reference_map, client):
- """Parse a `BatchGetDocumentsResponse` protobuf.
-
- Args:
- get_doc_response (~google.cloud.proto.firestore.v1.\
- firestore_pb2.BatchGetDocumentsResponse): A single response (from
- a stream) containing the "get" response for a document.
- reference_map (Dict[str, .DocumentReference]): A mapping (produced
- by :func:`_reference_info`) of fully-qualified document paths to
- document references.
- client (:class:`~google.cloud.firestore_v1.client.Client`):
- A client that has a document factory.
-
- Returns:
- [.DocumentSnapshot]: The retrieved snapshot.
-
- Raises:
- ValueError: If the response has a ``result`` field (a oneof) other
- than ``found`` or ``missing``.
- """
- result_type = get_doc_response.WhichOneof("result")
- if result_type == "found":
- reference = _get_reference(get_doc_response.found.name, reference_map)
- data = _helpers.decode_dict(get_doc_response.found.fields, client)
- snapshot = DocumentSnapshot(
- reference,
- data,
- exists=True,
- read_time=get_doc_response.read_time,
- create_time=get_doc_response.found.create_time,
- update_time=get_doc_response.found.update_time,
- )
- elif result_type == "missing":
- reference = _get_reference(get_doc_response.missing, reference_map)
- snapshot = DocumentSnapshot(
- reference,
- None,
- exists=False,
- read_time=get_doc_response.read_time,
- create_time=None,
- update_time=None,
- )
- else:
- raise ValueError(
- "`BatchGetDocumentsResponse.result` (a oneof) had a field other "
- "than `found` or `missing` set, or was unset"
- )
- return snapshot
-
-
-def _get_doc_mask(field_paths):
- """Get a document mask if field paths are provided.
-
- Args:
- field_paths (Optional[Iterable[str, ...]]): An iterable of field
- paths (``.``-delimited list of field names) to use as a
- projection of document fields in the returned results.
-
- Returns:
- Optional[google.cloud.firestore_v1.types.DocumentMask]: A mask
- to project documents to a restricted set of field paths.
- """
- if field_paths is None:
- return None
- else:
- return types.DocumentMask(field_paths=field_paths)
-
-
-def _item_to_collection_ref(iterator, item):
- """Convert collection ID to collection ref.
-
- Args:
- iterator (google.api_core.page_iterator.GRPCIterator):
- iterator response
- item (str): ID of the collection
- """
- return iterator.client.collection(item)
diff --git a/google/cloud/firestore_v1/collection.py b/google/cloud/firestore_v1/collection.py
index 27c3eeaa31..96d076e2c4 100644
--- a/google/cloud/firestore_v1/collection.py
+++ b/google/cloud/firestore_v1/collection.py
@@ -13,20 +13,24 @@
# limitations under the License.
"""Classes for representing collections for the Google Cloud Firestore API."""
-import random
-import warnings
-import six
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
-from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1.base_collection import (
+ BaseCollectionReference,
+ _item_to_document_ref,
+)
from google.cloud.firestore_v1 import query as query_mod
from google.cloud.firestore_v1.watch import Watch
from google.cloud.firestore_v1 import document
+from typing import Any, Callable, Generator, Tuple
-_AUTO_ID_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
+# Types needed only for Type Hints
+from google.cloud.firestore_v1.transaction import Transaction
-class CollectionReference(object):
+class CollectionReference(BaseCollectionReference):
"""A reference to a collection in a Firestore database.
The collection may already exist or this class can facilitate creation
@@ -52,86 +56,24 @@ class CollectionReference(object):
TypeError: If a keyword other than ``client`` is used.
"""
- def __init__(self, *path, **kwargs):
- _helpers.verify_path(path, is_collection=True)
- self._path = path
- self._client = kwargs.pop("client", None)
- if kwargs:
- raise TypeError(
- "Received unexpected arguments", kwargs, "Only `client` is supported"
- )
+ def __init__(self, *path, **kwargs) -> None:
+ super(CollectionReference, self).__init__(*path, **kwargs)
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return NotImplemented
- return self._path == other._path and self._client == other._client
-
- @property
- def id(self):
- """The collection identifier.
-
- Returns:
- str: The last component of the path.
- """
- return self._path[-1]
-
- @property
- def parent(self):
- """Document that owns the current collection.
-
- Returns:
- Optional[:class:`~google.cloud.firestore_v1.document.DocumentReference`]:
- The parent document, if the current collection is not a
- top-level collection.
- """
- if len(self._path) == 1:
- return None
- else:
- parent_path = self._path[:-1]
- return self._client.document(*parent_path)
-
- def document(self, document_id=None):
- """Create a sub-document underneath the current collection.
-
- Args:
- document_id (Optional[str]): The document identifier
- within the current collection. If not provided, will default
- to a random 20 character string composed of digits,
- uppercase and lowercase and letters.
+ def _query(self) -> query_mod.Query:
+ """Query factory.
Returns:
- :class:`~google.cloud.firestore_v1.document.DocumentReference`:
- The child document.
+ :class:`~google.cloud.firestore_v1.query.Query`
"""
- if document_id is None:
- document_id = _auto_id()
+ return query_mod.Query(self)
- child_path = self._path + (document_id,)
- return self._client.document(*child_path)
-
- def _parent_info(self):
- """Get fully-qualified parent path and prefix for this collection.
-
- Returns:
- Tuple[str, str]: Pair of
-
- * the fully-qualified (with database and project) path to the
- parent of this collection (will either be the database path
- or a document path).
- * the prefix to a document in this collection.
- """
- parent_doc = self.parent
- if parent_doc is None:
- parent_path = _helpers.DOCUMENT_PATH_DELIMITER.join(
- (self._client._database_string, "documents")
- )
- else:
- parent_path = parent_doc._document_path
-
- expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join((parent_path, self.id))
- return parent_path, expected_prefix
-
- def add(self, document_data, document_id=None):
+ def add(
+ self,
+ document_data: dict,
+ document_id: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Tuple[Any, Any]:
"""Create a document in the Firestore database with the provided data.
Args:
@@ -142,6 +84,10 @@ def add(self, document_data, document_id=None):
automatically assigned by the server (the assigned ID will be
a random 20 character string composed of digits,
uppercase and lowercase letters).
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Returns:
Tuple[:class:`google.protobuf.timestamp_pb2.Timestamp`, \
@@ -155,20 +101,28 @@ def add(self, document_data, document_id=None):
~google.cloud.exceptions.Conflict: If ``document_id`` is provided
and the document already exists.
"""
- if document_id is None:
- document_id = _auto_id()
-
- document_ref = self.document(document_id)
- write_result = document_ref.create(document_data)
+ document_ref, kwargs = self._prep_add(
+ document_data, document_id, retry, timeout,
+ )
+ write_result = document_ref.create(document_data, **kwargs)
return write_result.update_time, document_ref
- def list_documents(self, page_size=None):
+ def list_documents(
+ self,
+ page_size: int = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Generator[Any, Any, None]:
"""List all subdocuments of the current collection.
Args:
page_size (Optional[int]]): The maximum number of documents
- in each page of results from this request. Non-positive values
- are ignored. Defaults to a sensible value set by the API.
+ in each page of results from this request. Non-positive values
+ are ignored. Defaults to a sensible value set by the API.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Returns:
Sequence[:class:`~google.cloud.firestore_v1.collection.DocumentReference`]:
@@ -176,214 +130,50 @@ def list_documents(self, page_size=None):
collection does not exist at the time of `snapshot`, the
iterator will be empty
"""
- parent, _ = self._parent_info()
+ request, kwargs = self._prep_list_documents(page_size, retry, timeout)
iterator = self._client._firestore_api.list_documents(
- parent,
- self.id,
- page_size=page_size,
- show_missing=True,
- metadata=self._client._rpc_metadata,
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
)
- iterator.collection = self
- iterator.item_to_value = _item_to_document_ref
- return iterator
-
- def select(self, field_paths):
- """Create a "select" query with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1.query.Query.select` for
- more information on this method.
-
- Args:
- field_paths (Iterable[str, ...]): An iterable of field paths
- (``.``-delimited list of field names) to use as a projection
- of document fields in the query results.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A "projected" query.
- """
- query = query_mod.Query(self)
- return query.select(field_paths)
-
- def where(self, field_path, op_string, value):
- """Create a "where" query with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1.query.Query.where` for
- more information on this method.
-
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names) for the field to filter on.
- op_string (str): A comparison operation in the form of a string.
- Acceptable values are ``<``, ``<=``, ``==``, ``>=``
- and ``>``.
- value (Any): The value to compare the field against in the filter.
- If ``value`` is :data:`None` or a NaN, then ``==`` is the only
- allowed operation.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A filtered query.
- """
- query = query_mod.Query(self)
- return query.where(field_path, op_string, value)
-
- def order_by(self, field_path, **kwargs):
- """Create an "order by" query with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1.query.Query.order_by` for
- more information on this method.
-
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names) on which to order the query results.
- kwargs (Dict[str, Any]): The keyword arguments to pass along
- to the query. The only supported keyword is ``direction``,
- see :meth:`~google.cloud.firestore_v1.query.Query.order_by`
- for more information.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- An "order by" query.
- """
- query = query_mod.Query(self)
- return query.order_by(field_path, **kwargs)
-
- def limit(self, count):
- """Create a limited query with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1.query.Query.limit` for
- more information on this method.
-
- Args:
- count (int): Maximum number of documents to return that match
- the query.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A limited query.
- """
- query = query_mod.Query(self)
- return query.limit(count)
-
- def offset(self, num_to_skip):
- """Skip to an offset in a query with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1.query.Query.offset` for
- more information on this method.
-
- Args:
- num_to_skip (int): The number of results to skip at the beginning
- of query results. (Must be non-negative.)
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- An offset query.
- """
- query = query_mod.Query(self)
- return query.offset(num_to_skip)
-
- def start_at(self, document_fields):
- """Start query at a cursor with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1.query.Query.start_at` for
- more information on this method.
-
- Args:
- document_fields (Union[:class:`~google.cloud.firestore_v1.\
- document.DocumentSnapshot`, dict, list, tuple]):
- A document snapshot or a dictionary/list/tuple of fields
- representing a query results cursor. A cursor is a collection
- of values that represent a position in a query result set.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A query with cursor.
- """
- query = query_mod.Query(self)
- return query.start_at(document_fields)
-
- def start_after(self, document_fields):
- """Start query after a cursor with this collection as parent.
+ return (_item_to_document_ref(self, i) for i in iterator)
+
+ def get(
+ self,
+ transaction: Transaction = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> list:
+ """Read the documents in this collection.
- See
- :meth:`~google.cloud.firestore_v1.query.Query.start_after` for
- more information on this method.
+ This sends a ``RunQuery`` RPC and returns a list of documents
+ returned in the stream of ``RunQueryResponse`` messages.
Args:
- document_fields (Union[:class:`~google.cloud.firestore_v1.\
- document.DocumentSnapshot`, dict, list, tuple]):
- A document snapshot or a dictionary/list/tuple of fields
- representing a query results cursor. A cursor is a collection
- of values that represent a position in a query result set.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A query with cursor.
- """
- query = query_mod.Query(self)
- return query.start_after(document_fields)
-
- def end_before(self, document_fields):
- """End query before a cursor with this collection as parent.
+ transaction
+ (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
+ An existing transaction that this query will run in.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
- See
- :meth:`~google.cloud.firestore_v1.query.Query.end_before` for
- more information on this method.
-
- Args:
- document_fields (Union[:class:`~google.cloud.firestore_v1.\
- document.DocumentSnapshot`, dict, list, tuple]):
- A document snapshot or a dictionary/list/tuple of fields
- representing a query results cursor. A cursor is a collection
- of values that represent a position in a query result set.
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A query with cursor.
+ list: The documents in this collection that match the query.
"""
- query = query_mod.Query(self)
- return query.end_before(document_fields)
-
- def end_at(self, document_fields):
- """End query at a cursor with this collection as parent.
+ query, kwargs = self._prep_get_or_stream(retry, timeout)
- See
- :meth:`~google.cloud.firestore_v1.query.Query.end_at` for
- more information on this method.
-
- Args:
- document_fields (Union[:class:`~google.cloud.firestore_v1.\
- document.DocumentSnapshot`, dict, list, tuple]):
- A document snapshot or a dictionary/list/tuple of fields
- representing a query results cursor. A cursor is a collection
- of values that represent a position in a query result set.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A query with cursor.
- """
- query = query_mod.Query(self)
- return query.end_at(document_fields)
-
- def get(self, transaction=None):
- """Deprecated alias for :meth:`stream`."""
- warnings.warn(
- "'Collection.get' is deprecated: please use 'Collection.stream' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return self.stream(transaction=transaction)
+ return query.get(transaction=transaction, **kwargs)
- def stream(self, transaction=None):
+ def stream(
+ self,
+ transaction: Transaction = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Generator[document.DocumentSnapshot, Any, None]:
"""Read the documents in this collection.
This sends a ``RunQuery`` RPC and then returns an iterator which
@@ -405,15 +195,20 @@ def stream(self, transaction=None):
transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.\
Transaction`]):
An existing transaction that the query will run in.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Yields:
:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`:
The next document that fulfills the query.
"""
- query = query_mod.Query(self)
- return query.stream(transaction=transaction)
+ query, kwargs = self._prep_get_or_stream(retry, timeout)
+
+ return query.stream(transaction=transaction, **kwargs)
- def on_snapshot(self, callback):
+ def on_snapshot(self, callback: Callable) -> Watch:
"""Monitor the documents in this collection.
This starts a watch on this collection using a background thread. The
@@ -440,30 +235,8 @@ def on_snapshot(collection_snapshot, changes, read_time):
collection_watch.unsubscribe()
"""
return Watch.for_query(
- query_mod.Query(self),
+ self._query(),
callback,
document.DocumentSnapshot,
document.DocumentReference,
)
-
-
-def _auto_id():
- """Generate a "random" automatically generated ID.
-
- Returns:
- str: A 20 character string composed of digits, uppercase and
- lowercase and letters.
- """
- return "".join(random.choice(_AUTO_ID_CHARS) for _ in six.moves.xrange(20))
-
-
-def _item_to_document_ref(iterator, item):
- """Convert Document resource to document ref.
-
- Args:
- iterator (google.api_core.page_iterator.GRPCIterator):
- iterator response
- item (dict): document resource
- """
- document_id = item.name.split(_helpers.DOCUMENT_PATH_DELIMITER)[-1]
- return iterator.collection.document(document_id)
diff --git a/google/cloud/firestore_v1/document.py b/google/cloud/firestore_v1/document.py
index 571315e875..bdb5c7943b 100644
--- a/google/cloud/firestore_v1/document.py
+++ b/google/cloud/firestore_v1/document.py
@@ -14,18 +14,24 @@
"""Classes for representing documents for the Google Cloud Firestore API."""
-import copy
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
-import six
+from google.cloud.firestore_v1.base_document import (
+ BaseDocumentReference,
+ DocumentSnapshot,
+ _first_write_result,
+)
-from google.api_core import exceptions
+from google.api_core import exceptions # type: ignore
from google.cloud.firestore_v1 import _helpers
-from google.cloud.firestore_v1 import field_path as field_path_module
-from google.cloud.firestore_v1.proto import common_pb2
+from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.watch import Watch
+from google.protobuf import timestamp_pb2
+from typing import Any, Callable, Generator, Iterable
-class DocumentReference(object):
+class DocumentReference(BaseDocumentReference):
"""A reference to a document in a Firestore database.
The document may already exist or can be created by this class.
@@ -50,144 +56,24 @@ class DocumentReference(object):
TypeError: If a keyword other than ``client`` is used.
"""
- _document_path_internal = None
+ def __init__(self, *path, **kwargs) -> None:
+ super(DocumentReference, self).__init__(*path, **kwargs)
- def __init__(self, *path, **kwargs):
- _helpers.verify_path(path, is_collection=False)
- self._path = path
- self._client = kwargs.pop("client", None)
- if kwargs:
- raise TypeError(
- "Received unexpected arguments", kwargs, "Only `client` is supported"
- )
-
- def __copy__(self):
- """Shallow copy the instance.
-
- We leave the client "as-is" but tuple-unpack the path.
-
- Returns:
- .DocumentReference: A copy of the current document.
- """
- result = self.__class__(*self._path, client=self._client)
- result._document_path_internal = self._document_path_internal
- return result
-
- def __deepcopy__(self, unused_memo):
- """Deep copy the instance.
-
- This isn't a true deep copy, wee leave the client "as-is" but
- tuple-unpack the path.
-
- Returns:
- .DocumentReference: A copy of the current document.
- """
- return self.__copy__()
-
- def __eq__(self, other):
- """Equality check against another instance.
-
- Args:
- other (Any): A value to compare against.
-
- Returns:
- Union[bool, NotImplementedType]: Indicating if the values are
- equal.
- """
- if isinstance(other, DocumentReference):
- return self._client == other._client and self._path == other._path
- else:
- return NotImplemented
-
- def __hash__(self):
- return hash(self._path) + hash(self._client)
-
- def __ne__(self, other):
- """Inequality check against another instance.
-
- Args:
- other (Any): A value to compare against.
-
- Returns:
- Union[bool, NotImplementedType]: Indicating if the values are
- not equal.
- """
- if isinstance(other, DocumentReference):
- return self._client != other._client or self._path != other._path
- else:
- return NotImplemented
-
- @property
- def path(self):
- """Database-relative for this document.
-
- Returns:
- str: The document's relative path.
- """
- return "/".join(self._path)
-
- @property
- def _document_path(self):
- """Create and cache the full path for this document.
-
- Of the form:
-
- ``projects/{project_id}/databases/{database_id}/...
- documents/{document_path}``
-
- Returns:
- str: The full document path.
-
- Raises:
- ValueError: If the current document reference has no ``client``.
- """
- if self._document_path_internal is None:
- if self._client is None:
- raise ValueError("A document reference requires a `client`.")
- self._document_path_internal = _get_document_path(self._client, self._path)
-
- return self._document_path_internal
-
- @property
- def id(self):
- """The document identifier (within its collection).
-
- Returns:
- str: The last component of the path.
- """
- return self._path[-1]
-
- @property
- def parent(self):
- """Collection that owns the current document.
-
- Returns:
- :class:`~google.cloud.firestore_v1.collection.CollectionReference`:
- The parent collection.
- """
- parent_path = self._path[:-1]
- return self._client.collection(*parent_path)
-
- def collection(self, collection_id):
- """Create a sub-collection underneath the current document.
-
- Args:
- collection_id (str): The sub-collection identifier (sometimes
- referred to as the "kind").
-
- Returns:
- :class:`~google.cloud.firestore_v1.collection.CollectionReference`:
- The child collection.
- """
- child_path = self._path + (collection_id,)
- return self._client.collection(*child_path)
-
- def create(self, document_data):
+ def create(
+ self,
+ document_data: dict,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> write.WriteResult:
"""Create the current document in the Firestore database.
Args:
document_data (dict): Property names and values to use for
creating a document.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Returns:
:class:`~google.cloud.firestore_v1.types.WriteResult`:
@@ -198,12 +84,17 @@ def create(self, document_data):
:class:`~google.cloud.exceptions.Conflict`:
If the document already exists.
"""
- batch = self._client.batch()
- batch.create(self, document_data)
- write_results = batch.commit()
+ batch, kwargs = self._prep_create(document_data, retry, timeout)
+ write_results = batch.commit(**kwargs)
return _first_write_result(write_results)
- def set(self, document_data, merge=False):
+ def set(
+ self,
+ document_data: dict,
+ merge: bool = False,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> write.WriteResult:
"""Replace the current document in the Firestore database.
A write ``option`` can be specified to indicate preconditions of
@@ -223,18 +114,27 @@ def set(self, document_data, merge=False):
merge (Optional[bool] or Optional[List]):
If True, apply merging instead of overwriting the state
of the document.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Returns:
:class:`~google.cloud.firestore_v1.types.WriteResult`:
The write result corresponding to the committed document. A write
result contains an ``update_time`` field.
"""
- batch = self._client.batch()
- batch.set(self, document_data, merge=merge)
- write_results = batch.commit()
+ batch, kwargs = self._prep_set(document_data, merge, retry, timeout)
+ write_results = batch.commit(**kwargs)
return _first_write_result(write_results)
- def update(self, field_updates, option=None):
+ def update(
+ self,
+ field_updates: dict,
+ option: _helpers.WriteOption = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> write.WriteResult:
"""Update an existing document in the Firestore database.
By default, this method verifies that the document exists on the
@@ -368,6 +268,10 @@ def update(self, field_updates, option=None):
option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
A write option to make assertions / preconditions on the server
state of the document before applying changes.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Returns:
:class:`~google.cloud.firestore_v1.types.WriteResult`:
@@ -377,18 +281,26 @@ def update(self, field_updates, option=None):
Raises:
~google.cloud.exceptions.NotFound: If the document does not exist.
"""
- batch = self._client.batch()
- batch.update(self, field_updates, option=option)
- write_results = batch.commit()
+ batch, kwargs = self._prep_update(field_updates, option, retry, timeout)
+ write_results = batch.commit(**kwargs)
return _first_write_result(write_results)
- def delete(self, option=None):
+ def delete(
+ self,
+ option: _helpers.WriteOption = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> timestamp_pb2.Timestamp:
"""Delete the current document in the Firestore database.
Args:
option (Optional[:class:`~google.cloud.firestore_v1.client.WriteOption`]):
A write option to make assertions / preconditions on the server
state of the document before applying changes.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Returns:
:class:`google.protobuf.timestamp_pb2.Timestamp`:
@@ -397,20 +309,24 @@ def delete(self, option=None):
nothing was deleted), this method will still succeed and will
still return the time that the request was received by the server.
"""
- write_pb = _helpers.pb_for_delete(self._document_path, option)
+ request, kwargs = self._prep_delete(option, retry, timeout)
+
commit_response = self._client._firestore_api.commit(
- self._client._database_string,
- [write_pb],
- transaction=None,
- metadata=self._client._rpc_metadata,
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
)
return commit_response.commit_time
- def get(self, field_paths=None, transaction=None):
+ def get(
+ self,
+ field_paths: Iterable[str] = None,
+ transaction=None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> DocumentSnapshot:
"""Retrieve a snapshot of the current document.
- See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
+ See :meth:`~google.cloud.firestore_v1.base_client.BaseClient.field_path` for
more information on **field paths**.
If a ``transaction`` is used and it already has write operations
@@ -425,30 +341,25 @@ def get(self, field_paths=None, transaction=None):
transaction (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
An existing transaction that this reference
will be retrieved in.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Returns:
- :class:`~google.cloud.firestore_v1.document.DocumentSnapshot`:
+ :class:`~google.cloud.firestore_v1.base_document.DocumentSnapshot`:
A snapshot of the current document. If the document does not
exist at the time of the snapshot is taken, the snapshot's
:attr:`reference`, :attr:`data`, :attr:`update_time`, and
:attr:`create_time` attributes will all be ``None`` and
its :attr:`exists` attribute will be ``False``.
"""
- if isinstance(field_paths, six.string_types):
- raise ValueError("'field_paths' must be a sequence of paths, not a string.")
-
- if field_paths is not None:
- mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
- else:
- mask = None
+ request, kwargs = self._prep_get(field_paths, transaction, retry, timeout)
firestore_api = self._client._firestore_api
try:
document_pb = firestore_api.get_document(
- self._document_path,
- mask=mask,
- transaction=_helpers.get_transaction_id(transaction),
- metadata=self._client._rpc_metadata,
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
)
except exceptions.NotFound:
data = None
@@ -470,13 +381,22 @@ def get(self, field_paths=None, transaction=None):
update_time=update_time,
)
- def collections(self, page_size=None):
+ def collections(
+ self,
+ page_size: int = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Generator[Any, Any, None]:
"""List subcollections of the current document.
Args:
page_size (Optional[int]]): The maximum number of collections
- in each page of results from this request. Non-positive values
- are ignored. Defaults to a sensible value set by the API.
+ in each page of results from this request. Non-positive values
+ are ignored. Defaults to a sensible value set by the API.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Returns:
Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]:
@@ -484,16 +404,16 @@ def collections(self, page_size=None):
document does not exist at the time of `snapshot`, the
iterator will be empty
"""
+ request, kwargs = self._prep_collections(page_size, retry, timeout)
+
iterator = self._client._firestore_api.list_collection_ids(
- self._document_path,
- page_size=page_size,
- metadata=self._client._rpc_metadata,
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
)
- iterator.document = self
- iterator.item_to_value = _item_to_collection_ref
- return iterator
- def on_snapshot(self, callback):
+ for collection_id in iterator:
+ yield self.collection(collection_id)
+
+ def on_snapshot(self, callback: Callable) -> Watch:
"""Watch this document.
This starts a watch on this document using a background thread. The
@@ -526,261 +446,3 @@ def on_snapshot(document_snapshot, changes, read_time):
doc_watch.unsubscribe()
"""
return Watch.for_document(self, callback, DocumentSnapshot, DocumentReference)
-
-
-class DocumentSnapshot(object):
- """A snapshot of document data in a Firestore database.
-
- This represents data retrieved at a specific time and may not contain
- all fields stored for the document (i.e. a hand-picked selection of
- fields may have been retrieved).
-
- Instances of this class are not intended to be constructed by hand,
- rather they'll be returned as responses to various methods, such as
- :meth:`~google.cloud.DocumentReference.get`.
-
- Args:
- reference (:class:`~google.cloud.firestore_v1.document.DocumentReference`):
- A document reference corresponding to the document that contains
- the data in this snapshot.
- data (Dict[str, Any]):
- The data retrieved in the snapshot.
- exists (bool):
- Indicates if the document existed at the time the snapshot was
- retrieved.
- read_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
- The time that this snapshot was read from the server.
- create_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
- The time that this document was created.
- update_time (:class:`google.protobuf.timestamp_pb2.Timestamp`):
- The time that this document was last updated.
- """
-
- def __init__(self, reference, data, exists, read_time, create_time, update_time):
- self._reference = reference
- # We want immutable data, so callers can't modify this value
- # out from under us.
- self._data = copy.deepcopy(data)
- self._exists = exists
- self.read_time = read_time
- self.create_time = create_time
- self.update_time = update_time
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return NotImplemented
- return self._reference == other._reference and self._data == other._data
-
- def __hash__(self):
- seconds = self.update_time.seconds
- nanos = self.update_time.nanos
- return hash(self._reference) + hash(seconds) + hash(nanos)
-
- @property
- def _client(self):
- """The client that owns the document reference for this snapshot.
-
- Returns:
- :class:`~google.cloud.firestore_v1.client.Client`:
- The client that owns this document.
- """
- return self._reference._client
-
- @property
- def exists(self):
- """Existence flag.
-
- Indicates if the document existed at the time this snapshot
- was retrieved.
-
- Returns:
- bool: The existence flag.
- """
- return self._exists
-
- @property
- def id(self):
- """The document identifier (within its collection).
-
- Returns:
- str: The last component of the path of the document.
- """
- return self._reference.id
-
- @property
- def reference(self):
- """Document reference corresponding to document that owns this data.
-
- Returns:
- :class:`~google.cloud.firestore_v1.document.DocumentReference`:
- A document reference corresponding to this document.
- """
- return self._reference
-
- def get(self, field_path):
- """Get a value from the snapshot data.
-
- If the data is nested, for example:
-
- .. code-block:: python
-
- >>> snapshot.to_dict()
- {
- 'top1': {
- 'middle2': {
- 'bottom3': 20,
- 'bottom4': 22,
- },
- 'middle5': True,
- },
- 'top6': b'\x00\x01 foo',
- }
-
- a **field path** can be used to access the nested data. For
- example:
-
- .. code-block:: python
-
- >>> snapshot.get('top1')
- {
- 'middle2': {
- 'bottom3': 20,
- 'bottom4': 22,
- },
- 'middle5': True,
- }
- >>> snapshot.get('top1.middle2')
- {
- 'bottom3': 20,
- 'bottom4': 22,
- }
- >>> snapshot.get('top1.middle2.bottom3')
- 20
-
- See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
- more information on **field paths**.
-
- A copy is returned since the data may contain mutable values,
- but the data stored in the snapshot must remain immutable.
-
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names).
-
- Returns:
- Any or None:
- (A copy of) the value stored for the ``field_path`` or
- None if snapshot document does not exist.
-
- Raises:
- KeyError: If the ``field_path`` does not match nested data
- in the snapshot.
- """
- if not self._exists:
- return None
- nested_data = field_path_module.get_nested_value(field_path, self._data)
- return copy.deepcopy(nested_data)
-
- def to_dict(self):
- """Retrieve the data contained in this snapshot.
-
- A copy is returned since the data may contain mutable values,
- but the data stored in the snapshot must remain immutable.
-
- Returns:
- Dict[str, Any] or None:
- The data in the snapshot. Returns None if reference
- does not exist.
- """
- if not self._exists:
- return None
- return copy.deepcopy(self._data)
-
-
-def _get_document_path(client, path):
- """Convert a path tuple into a full path string.
-
- Of the form:
-
- ``projects/{project_id}/databases/{database_id}/...
- documents/{document_path}``
-
- Args:
- client (:class:`~google.cloud.firestore_v1.client.Client`):
- The client that holds configuration details and a GAPIC client
- object.
- path (Tuple[str, ...]): The components in a document path.
-
- Returns:
- str: The fully-qualified document path.
- """
- parts = (client._database_string, "documents") + path
- return _helpers.DOCUMENT_PATH_DELIMITER.join(parts)
-
-
-def _consume_single_get(response_iterator):
- """Consume a gRPC stream that should contain a single response.
-
- The stream will correspond to a ``BatchGetDocuments`` request made
- for a single document.
-
- Args:
- response_iterator (~google.cloud.exceptions.GrpcRendezvous): A
- streaming iterator returned from a ``BatchGetDocuments``
- request.
-
- Returns:
- ~google.cloud.proto.firestore.v1.\
- firestore_pb2.BatchGetDocumentsResponse: The single "get"
- response in the batch.
-
- Raises:
- ValueError: If anything other than exactly one response is returned.
- """
- # Calling ``list()`` consumes the entire iterator.
- all_responses = list(response_iterator)
- if len(all_responses) != 1:
- raise ValueError(
- "Unexpected response from `BatchGetDocumentsResponse`",
- all_responses,
- "Expected only one result",
- )
-
- return all_responses[0]
-
-
-def _first_write_result(write_results):
- """Get first write result from list.
-
- For cases where ``len(write_results) > 1``, this assumes the writes
- occurred at the same time (e.g. if an update and transform are sent
- at the same time).
-
- Args:
- write_results (List[google.cloud.proto.firestore.v1.\
- write_pb2.WriteResult, ...]: The write results from a
- ``CommitResponse``.
-
- Returns:
- google.cloud.firestore_v1.types.WriteResult: The
- lone write result from ``write_results``.
-
- Raises:
- ValueError: If there are zero write results. This is likely to
- **never** occur, since the backend should be stable.
- """
- if not write_results:
- raise ValueError("Expected at least one write result")
-
- return write_results[0]
-
-
-def _item_to_collection_ref(iterator, item):
- """Convert collection ID to collection ref.
-
- Args:
- iterator (google.api_core.page_iterator.GRPCIterator):
- iterator response
- item (str): ID of the collection
- """
- return iterator.document.collection(item)
diff --git a/google/cloud/firestore_v1/field_path.py b/google/cloud/firestore_v1/field_path.py
index 58b4f3b9ac..610d8ffd83 100644
--- a/google/cloud/firestore_v1/field_path.py
+++ b/google/cloud/firestore_v1/field_path.py
@@ -14,14 +14,10 @@
"""Utilities for managing / converting field paths to / from strings."""
-try:
- from collections import abc as collections_abc
-except ImportError: # Python 2.7
- import collections as collections_abc
+from collections import abc
import re
-
-import six
+from typing import Iterable
_FIELD_PATH_MISSING_TOP = "{!r} is not contained in the data"
@@ -47,7 +43,7 @@
TOKENS_REGEX = re.compile(TOKENS_PATTERN)
-def _tokenize_field_path(path):
+def _tokenize_field_path(path: str):
"""Lex a field path into tokens (including dots).
Args:
@@ -68,7 +64,7 @@ def _tokenize_field_path(path):
raise ValueError("Path {} not consumed, residue: {}".format(path, path[pos:]))
-def split_field_path(path):
+def split_field_path(path: str):
"""Split a field path into valid elements (without dots).
Args:
@@ -103,7 +99,7 @@ def split_field_path(path):
return elements
-def parse_field_path(api_repr):
+def parse_field_path(api_repr: str):
"""Parse a **field path** from into a list of nested field names.
See :func:`field_path` for more on **field paths**.
@@ -132,7 +128,7 @@ def parse_field_path(api_repr):
return field_names
-def render_field_path(field_names):
+def render_field_path(field_names: Iterable[str]):
"""Create a **field path** from a list of nested field names.
A **field path** is a ``.``-delimited concatenation of the field
@@ -176,7 +172,7 @@ def render_field_path(field_names):
get_field_path = render_field_path # backward-compatibility
-def get_nested_value(field_path, data):
+def get_nested_value(field_path: str, data: dict):
"""Get a (potentially nested) value from a dictionary.
If the data is nested, for example:
@@ -234,7 +230,7 @@ def get_nested_value(field_path, data):
nested_data = data
for index, field_name in enumerate(field_names):
- if isinstance(nested_data, collections_abc.Mapping):
+ if isinstance(nested_data, abc.Mapping):
if field_name in nested_data:
nested_data = nested_data[field_name]
else:
@@ -271,13 +267,13 @@ class FieldPath(object):
def __init__(self, *parts):
for part in parts:
- if not isinstance(part, six.string_types) or not part:
+ if not isinstance(part, str) or not part:
error = "One or more components is not a string or is empty."
raise ValueError(error)
self.parts = tuple(parts)
@classmethod
- def from_api_repr(cls, api_repr):
+ def from_api_repr(cls, api_repr: str):
"""Factory: create a FieldPath from the string formatted per the API.
Args:
@@ -294,7 +290,7 @@ def from_api_repr(cls, api_repr):
return cls(*parse_field_path(api_repr))
@classmethod
- def from_string(cls, path_string):
+ def from_string(cls, path_string: str):
"""Factory: create a FieldPath from a unicode string representation.
This method splits on the character `.` and disallows the
@@ -353,7 +349,7 @@ def __add__(self, other):
if isinstance(other, FieldPath):
parts = self.parts + other.parts
return FieldPath(*parts)
- elif isinstance(other, six.string_types):
+ elif isinstance(other, str):
parts = self.parts + FieldPath.from_string(other).parts
return FieldPath(*parts)
else:
@@ -382,7 +378,7 @@ def lineage(self):
Returns: Set[:class:`FieldPath`]
"""
- indexes = six.moves.range(1, len(self.parts))
+ indexes = range(1, len(self.parts))
return {FieldPath(*self.parts[:index]) for index in indexes}
@staticmethod
diff --git a/google/cloud/firestore_v1/gapic/__init__.py b/google/cloud/firestore_v1/gapic/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/google/cloud/firestore_v1/gapic/enums.py b/google/cloud/firestore_v1/gapic/enums.py
deleted file mode 100644
index ee7a9ec6f5..0000000000
--- a/google/cloud/firestore_v1/gapic/enums.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Wrappers for protocol buffer enum types."""
-
-import enum
-
-
-class NullValue(enum.IntEnum):
- """
- ``NullValue`` is a singleton enumeration to represent the null value for
- the ``Value`` type union.
-
- The JSON representation for ``NullValue`` is JSON ``null``.
-
- Attributes:
- NULL_VALUE (int): Null value.
- """
-
- NULL_VALUE = 0
-
-
-class DocumentTransform(object):
- class FieldTransform(object):
- class ServerValue(enum.IntEnum):
- """
- A value that is calculated by the server.
-
- Attributes:
- SERVER_VALUE_UNSPECIFIED (int): Unspecified. This value must not be used.
- REQUEST_TIME (int): The time at which the server processed the request, with millisecond
- precision.
- """
-
- SERVER_VALUE_UNSPECIFIED = 0
- REQUEST_TIME = 1
-
-
-class StructuredQuery(object):
- class Direction(enum.IntEnum):
- """
- A sort direction.
-
- Attributes:
- DIRECTION_UNSPECIFIED (int): Unspecified.
- ASCENDING (int): Ascending.
- DESCENDING (int): Descending.
- """
-
- DIRECTION_UNSPECIFIED = 0
- ASCENDING = 1
- DESCENDING = 2
-
- class CompositeFilter(object):
- class Operator(enum.IntEnum):
- """
- A composite filter operator.
-
- Attributes:
- OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
- AND (int): The results are required to satisfy each of the combined filters.
- """
-
- OPERATOR_UNSPECIFIED = 0
- AND = 1
-
- class FieldFilter(object):
- class Operator(enum.IntEnum):
- """
- A field filter operator.
-
- Attributes:
- OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
- LESS_THAN (int): Less than. Requires that the field come first in ``order_by``.
- LESS_THAN_OR_EQUAL (int): Less than or equal. Requires that the field come first in ``order_by``.
- GREATER_THAN (int): Greater than. Requires that the field come first in ``order_by``.
- GREATER_THAN_OR_EQUAL (int): Greater than or equal. Requires that the field come first in
- ``order_by``.
- EQUAL (int): Equal.
- ARRAY_CONTAINS (int): Contains. Requires that the field is an array.
- IN (int): In. Requires that ``value`` is a non-empty ArrayValue with at most 10
- values.
- ARRAY_CONTAINS_ANY (int): Contains any. Requires that the field is an array and ``value`` is a
- non-empty ArrayValue with at most 10 values.
- """
-
- OPERATOR_UNSPECIFIED = 0
- LESS_THAN = 1
- LESS_THAN_OR_EQUAL = 2
- GREATER_THAN = 3
- GREATER_THAN_OR_EQUAL = 4
- EQUAL = 5
- ARRAY_CONTAINS = 7
- IN = 8
- ARRAY_CONTAINS_ANY = 9
-
- class UnaryFilter(object):
- class Operator(enum.IntEnum):
- """
- A unary operator.
-
- Attributes:
- OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
- IS_NAN (int): Test if a field is equal to NaN.
- IS_NULL (int): Test if an expression evaluates to Null.
- """
-
- OPERATOR_UNSPECIFIED = 0
- IS_NAN = 2
- IS_NULL = 3
-
-
-class TargetChange(object):
- class TargetChangeType(enum.IntEnum):
- """
- The type of change.
-
- Attributes:
- NO_CHANGE (int): No change has occurred. Used only to send an updated ``resume_token``.
- ADD (int): The targets have been added.
- REMOVE (int): The targets have been removed.
- CURRENT (int): The targets reflect all changes committed before the targets were added
- to the stream.
-
- This will be sent after or with a ``read_time`` that is greater than or
- equal to the time at which the targets were added.
-
- Listeners can wait for this change if read-after-write semantics are
- desired.
- RESET (int): The targets have been reset, and a new initial state for the targets
- will be returned in subsequent changes.
-
- After the initial state is complete, ``CURRENT`` will be returned even
- if the target was previously indicated to be ``CURRENT``.
- """
-
- NO_CHANGE = 0
- ADD = 1
- REMOVE = 2
- CURRENT = 3
- RESET = 4
diff --git a/google/cloud/firestore_v1/gapic/firestore_client.py b/google/cloud/firestore_v1/gapic/firestore_client.py
deleted file mode 100644
index d6f3e33206..0000000000
--- a/google/cloud/firestore_v1/gapic/firestore_client.py
+++ /dev/null
@@ -1,1452 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Accesses the google.firestore.v1 Firestore API."""
-
-import functools
-import pkg_resources
-import warnings
-
-from google.oauth2 import service_account
-import google.api_core.client_options
-import google.api_core.gapic_v1.client_info
-import google.api_core.gapic_v1.config
-import google.api_core.gapic_v1.method
-import google.api_core.gapic_v1.routing_header
-import google.api_core.grpc_helpers
-import google.api_core.page_iterator
-import google.api_core.path_template
-import google.api_core.protobuf_helpers
-import grpc
-
-from google.cloud.firestore_v1.gapic import enums
-from google.cloud.firestore_v1.gapic import firestore_client_config
-from google.cloud.firestore_v1.gapic.transports import firestore_grpc_transport
-from google.cloud.firestore_v1.proto import common_pb2
-from google.cloud.firestore_v1.proto import document_pb2
-from google.cloud.firestore_v1.proto import firestore_pb2
-from google.cloud.firestore_v1.proto import firestore_pb2_grpc
-from google.cloud.firestore_v1.proto import query_pb2
-from google.cloud.firestore_v1.proto import write_pb2
-from google.protobuf import empty_pb2
-from google.protobuf import timestamp_pb2
-
-
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
- "google-cloud-firestore"
-).version
-
-
-class FirestoreClient(object):
- """
- The Cloud Firestore service.
-
- Cloud Firestore is a fast, fully managed, serverless, cloud-native NoSQL
- document database that simplifies storing, syncing, and querying data for
- your mobile, web, and IoT apps at global scale. Its client libraries provide
- live synchronization and offline support, while its security features and
- integrations with Firebase and Google Cloud Platform (GCP) accelerate
- building truly serverless apps.
- """
-
- SERVICE_ADDRESS = "firestore.googleapis.com:443"
- """The default address of the service."""
-
- # The name of the interface for this client. This is the key used to
- # find the method configuration in the client_config dictionary.
- _INTERFACE_NAME = "google.firestore.v1.Firestore"
-
- @classmethod
- def from_service_account_file(cls, filename, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- FirestoreClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- @classmethod
- def any_path_path(cls, project, database, document, any_path):
- """Return a fully-qualified any_path string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}/documents/{document}/{any_path=**}",
- project=project,
- database=database,
- document=document,
- any_path=any_path,
- )
-
- @classmethod
- def database_root_path(cls, project, database):
- """Return a fully-qualified database_root string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}",
- project=project,
- database=database,
- )
-
- @classmethod
- def document_path_path(cls, project, database, document_path):
- """Return a fully-qualified document_path string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}/documents/{document_path=**}",
- project=project,
- database=database,
- document_path=document_path,
- )
-
- @classmethod
- def document_root_path(cls, project, database):
- """Return a fully-qualified document_root string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}/documents",
- project=project,
- database=database,
- )
-
- def __init__(
- self,
- transport=None,
- channel=None,
- credentials=None,
- client_config=None,
- client_info=None,
- client_options=None,
- ):
- """Constructor.
-
- Args:
- transport (Union[~.FirestoreGrpcTransport,
- Callable[[~.Credentials, type], ~.FirestoreGrpcTransport]): A transport
- instance, responsible for actually making the API calls.
- The default transport uses the gRPC protocol.
- This argument may also be a callable which returns a
- transport instance. Callables will be sent the credentials
- as the first argument and the default transport class as
- the second argument.
- channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
- through which to make calls. This argument is mutually exclusive
- with ``credentials``; providing both will raise an exception.
- credentials (google.auth.credentials.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is mutually exclusive with providing a
- transport instance to ``transport``; doing so will raise
- an exception.
- client_config (dict): DEPRECATED. A dictionary of call options for
- each method. If not specified, the default configuration is used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- client_options (Union[dict, google.api_core.client_options.ClientOptions]):
- Client options used to set user options on the client. API Endpoint
- should be set through client_options.
- """
- # Raise deprecation warnings for things we want to go away.
- if client_config is not None:
- warnings.warn(
- "The `client_config` argument is deprecated.",
- PendingDeprecationWarning,
- stacklevel=2,
- )
- else:
- client_config = firestore_client_config.config
-
- if channel:
- warnings.warn(
- "The `channel` argument is deprecated; use " "`transport` instead.",
- PendingDeprecationWarning,
- stacklevel=2,
- )
-
- api_endpoint = self.SERVICE_ADDRESS
- if client_options:
- if type(client_options) == dict:
- client_options = google.api_core.client_options.from_dict(
- client_options
- )
- if client_options.api_endpoint:
- api_endpoint = client_options.api_endpoint
-
- # Instantiate the transport.
- # The transport is responsible for handling serialization and
- # deserialization and actually sending data to the service.
- if transport:
- if callable(transport):
- self.transport = transport(
- credentials=credentials,
- default_class=firestore_grpc_transport.FirestoreGrpcTransport,
- address=api_endpoint,
- )
- else:
- if credentials:
- raise ValueError(
- "Received both a transport instance and "
- "credentials; these are mutually exclusive."
- )
- self.transport = transport
- else:
- self.transport = firestore_grpc_transport.FirestoreGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
- )
-
- if client_info is None:
- client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
- )
- else:
- client_info.gapic_version = _GAPIC_LIBRARY_VERSION
- self._client_info = client_info
-
- # Parse out the default settings for retry and timeout for each RPC
- # from the client configuration.
- # (Ordinarily, these are the defaults specified in the `*_config.py`
- # file next to this one.)
- self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
- )
-
- # Save a dictionary of cached API call functions.
- # These are the actual callables which invoke the proper
- # transport methods, wrapped with `wrap_method` to add retry,
- # timeout, and the like.
- self._inner_api_calls = {}
-
- # Service calls
- def get_document(
- self,
- name,
- mask=None,
- transaction=None,
- read_time=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Gets a single document.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> response = client.get_document(name)
-
- Args:
- name (str): Required. The resource name of the Document to get. In the format:
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields.
-
- If the document has a field that is not present in this mask, that field
- will not be returned in the response.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.DocumentMask`
- transaction (bytes): Reads the document in a transaction.
- read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads the version of the document at the given time.
- This may not be older than 60 seconds.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.Timestamp`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_v1.types.Document` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "get_document" not in self._inner_api_calls:
- self._inner_api_calls[
- "get_document"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.get_document,
- default_retry=self._method_configs["GetDocument"].retry,
- default_timeout=self._method_configs["GetDocument"].timeout,
- client_info=self._client_info,
- )
-
- # Sanity check: We have some fields which are mutually exclusive;
- # raise ValueError if more than one is sent.
- google.api_core.protobuf_helpers.check_oneof(
- transaction=transaction, read_time=read_time
- )
-
- request = firestore_pb2.GetDocumentRequest(
- name=name, mask=mask, transaction=transaction, read_time=read_time
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["get_document"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def list_documents(
- self,
- parent,
- collection_id,
- page_size=None,
- order_by=None,
- mask=None,
- transaction=None,
- read_time=None,
- show_missing=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Lists documents.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> # TODO: Initialize `collection_id`:
- >>> collection_id = ''
- >>>
- >>> # Iterate over all results
- >>> for element in client.list_documents(parent, collection_id):
- ... # process element
- ... pass
- >>>
- >>>
- >>> # Alternatively:
- >>>
- >>> # Iterate over results one page at a time
- >>> for page in client.list_documents(parent, collection_id).pages:
- ... for element in page:
- ... # process element
- ... pass
-
- Args:
- parent (str): Required. The parent resource name. In the format:
- ``projects/{project_id}/databases/{database_id}/documents`` or
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- For example: ``projects/my-project/databases/my-database/documents`` or
- ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
- collection_id (str): Required. The collection ID, relative to ``parent``, to list. For
- example: ``chatrooms`` or ``messages``.
- page_size (int): The maximum number of resources contained in the
- underlying API response. If page streaming is performed per-
- resource, this parameter does not affect the return value. If page
- streaming is performed per-page, this determines the maximum number
- of resources in a page.
- order_by (str): The order to sort results by. For example: ``priority desc, name``.
- mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields.
-
- If a document has a field that is not present in this mask, that field
- will not be returned in the response.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.DocumentMask`
- transaction (bytes): Reads documents in a transaction.
- read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads documents as they were at the given time.
- This may not be older than 60 seconds.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.Timestamp`
- show_missing (bool): If the list should show missing documents. A missing document is a
- document that does not exist but has sub-documents. These documents will
- be returned with a key but will not have fields,
- ``Document.create_time``, or ``Document.update_time`` set.
-
- Requests with ``show_missing`` may not specify ``where`` or
- ``order_by``.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.api_core.page_iterator.PageIterator` instance.
- An iterable of :class:`~google.cloud.firestore_v1.types.Document` instances.
- You can also iterate over the pages of the response
- using its `pages` property.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "list_documents" not in self._inner_api_calls:
- self._inner_api_calls[
- "list_documents"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.list_documents,
- default_retry=self._method_configs["ListDocuments"].retry,
- default_timeout=self._method_configs["ListDocuments"].timeout,
- client_info=self._client_info,
- )
-
- # Sanity check: We have some fields which are mutually exclusive;
- # raise ValueError if more than one is sent.
- google.api_core.protobuf_helpers.check_oneof(
- transaction=transaction, read_time=read_time
- )
-
- request = firestore_pb2.ListDocumentsRequest(
- parent=parent,
- collection_id=collection_id,
- page_size=page_size,
- order_by=order_by,
- mask=mask,
- transaction=transaction,
- read_time=read_time,
- show_missing=show_missing,
- )
- iterator = google.api_core.page_iterator.GRPCIterator(
- client=None,
- method=functools.partial(
- self._inner_api_calls["list_documents"],
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- ),
- request=request,
- items_field="documents",
- request_token_field="page_token",
- response_token_field="next_page_token",
- )
- return iterator
-
- def create_document(
- self,
- parent,
- collection_id,
- document_id,
- document,
- mask=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Creates a new document.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> # TODO: Initialize `collection_id`:
- >>> collection_id = ''
- >>>
- >>> # TODO: Initialize `document_id`:
- >>> document_id = ''
- >>>
- >>> # TODO: Initialize `document`:
- >>> document = {}
- >>>
- >>> response = client.create_document(parent, collection_id, document_id, document)
-
- Args:
- parent (str): Required. The parent resource. For example:
- ``projects/{project_id}/databases/{database_id}/documents`` or
- ``projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}``
- collection_id (str): Required. The collection ID, relative to ``parent``, to list. For
- example: ``chatrooms``.
- document_id (str): The client-assigned document ID to use for this document.
-
- Optional. If not specified, an ID will be assigned by the service.
- document (Union[dict, ~google.cloud.firestore_v1.types.Document]): Required. The document to create. ``name`` must not be set.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.Document`
- mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields.
-
- If the document has a field that is not present in this mask, that field
- will not be returned in the response.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.DocumentMask`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_v1.types.Document` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "create_document" not in self._inner_api_calls:
- self._inner_api_calls[
- "create_document"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.create_document,
- default_retry=self._method_configs["CreateDocument"].retry,
- default_timeout=self._method_configs["CreateDocument"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.CreateDocumentRequest(
- parent=parent,
- collection_id=collection_id,
- document_id=document_id,
- document=document,
- mask=mask,
- )
- return self._inner_api_calls["create_document"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def update_document(
- self,
- document,
- update_mask,
- mask=None,
- current_document=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Updates or inserts a document.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> # TODO: Initialize `document`:
- >>> document = {}
- >>>
- >>> # TODO: Initialize `update_mask`:
- >>> update_mask = {}
- >>>
- >>> response = client.update_document(document, update_mask)
-
- Args:
- document (Union[dict, ~google.cloud.firestore_v1.types.Document]): Required. The updated document.
- Creates the document if it does not already exist.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.Document`
- update_mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to update.
- None of the field paths in the mask may contain a reserved name.
-
- If the document exists on the server and has fields not referenced in the
- mask, they are left unchanged.
- Fields referenced in the mask, but not present in the input document, are
- deleted from the document on the server.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.DocumentMask`
- mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields.
-
- If the document has a field that is not present in this mask, that field
- will not be returned in the response.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.DocumentMask`
- current_document (Union[dict, ~google.cloud.firestore_v1.types.Precondition]): An optional precondition on the document.
- The request will fail if this is set and not met by the target document.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.Precondition`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_v1.types.Document` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "update_document" not in self._inner_api_calls:
- self._inner_api_calls[
- "update_document"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.update_document,
- default_retry=self._method_configs["UpdateDocument"].retry,
- default_timeout=self._method_configs["UpdateDocument"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.UpdateDocumentRequest(
- document=document,
- update_mask=update_mask,
- mask=mask,
- current_document=current_document,
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("document.name", document.name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["update_document"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def delete_document(
- self,
- name,
- current_document=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Deletes a document.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> client.delete_document(name)
-
- Args:
- name (str): Required. The resource name of the Document to delete. In the format:
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- current_document (Union[dict, ~google.cloud.firestore_v1.types.Precondition]): An optional precondition on the document.
- The request will fail if this is set and not met by the target document.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.Precondition`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "delete_document" not in self._inner_api_calls:
- self._inner_api_calls[
- "delete_document"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.delete_document,
- default_retry=self._method_configs["DeleteDocument"].retry,
- default_timeout=self._method_configs["DeleteDocument"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.DeleteDocumentRequest(
- name=name, current_document=current_document
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- self._inner_api_calls["delete_document"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def batch_get_documents(
- self,
- database,
- documents,
- mask=None,
- transaction=None,
- new_transaction=None,
- read_time=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Gets multiple documents.
-
- Documents returned by this method are not guaranteed to be returned in the
- same order that they were requested.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>>
- >>> # TODO: Initialize `documents`:
- >>> documents = []
- >>>
- >>> for element in client.batch_get_documents(database, documents):
- ... # process element
- ... pass
-
- Args:
- database (str): Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- documents (list[str]): The names of the documents to retrieve. In the format:
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- The request will fail if any of the document is not a child resource of
- the given ``database``. Duplicate names will be elided.
- mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields.
-
- If a document has a field that is not present in this mask, that field will
- not be returned in the response.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.DocumentMask`
- transaction (bytes): Reads documents in a transaction.
- new_transaction (Union[dict, ~google.cloud.firestore_v1.types.TransactionOptions]): Starts a new transaction and reads the documents.
- Defaults to a read-only transaction.
- The new transaction ID will be returned as the first response in the
- stream.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.TransactionOptions`
- read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads documents as they were at the given time.
- This may not be older than 60 seconds.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.Timestamp`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- Iterable[~google.cloud.firestore_v1.types.BatchGetDocumentsResponse].
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "batch_get_documents" not in self._inner_api_calls:
- self._inner_api_calls[
- "batch_get_documents"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.batch_get_documents,
- default_retry=self._method_configs["BatchGetDocuments"].retry,
- default_timeout=self._method_configs["BatchGetDocuments"].timeout,
- client_info=self._client_info,
- )
-
- # Sanity check: We have some fields which are mutually exclusive;
- # raise ValueError if more than one is sent.
- google.api_core.protobuf_helpers.check_oneof(
- transaction=transaction,
- new_transaction=new_transaction,
- read_time=read_time,
- )
-
- request = firestore_pb2.BatchGetDocumentsRequest(
- database=database,
- documents=documents,
- mask=mask,
- transaction=transaction,
- new_transaction=new_transaction,
- read_time=read_time,
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("database", database)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["batch_get_documents"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def begin_transaction(
- self,
- database,
- options_=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Starts a new transaction.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>>
- >>> response = client.begin_transaction(database)
-
- Args:
- database (str): Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- options_ (Union[dict, ~google.cloud.firestore_v1.types.TransactionOptions]): The options for the transaction.
- Defaults to a read-write transaction.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.TransactionOptions`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_v1.types.BeginTransactionResponse` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "begin_transaction" not in self._inner_api_calls:
- self._inner_api_calls[
- "begin_transaction"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.begin_transaction,
- default_retry=self._method_configs["BeginTransaction"].retry,
- default_timeout=self._method_configs["BeginTransaction"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.BeginTransactionRequest(
- database=database, options=options_
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("database", database)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["begin_transaction"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def commit(
- self,
- database,
- writes,
- transaction=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Commits a transaction, while optionally updating documents.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>>
- >>> # TODO: Initialize `writes`:
- >>> writes = []
- >>>
- >>> response = client.commit(database, writes)
-
- Args:
- database (str): Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- writes (list[Union[dict, ~google.cloud.firestore_v1.types.Write]]): The writes to apply.
-
- Always executed atomically and in order.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.Write`
- transaction (bytes): If set, applies all writes in this transaction, and commits it.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_v1.types.CommitResponse` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "commit" not in self._inner_api_calls:
- self._inner_api_calls[
- "commit"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.commit,
- default_retry=self._method_configs["Commit"].retry,
- default_timeout=self._method_configs["Commit"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.CommitRequest(
- database=database, writes=writes, transaction=transaction
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("database", database)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["commit"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def rollback(
- self,
- database,
- transaction,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Rolls back a transaction.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>>
- >>> # TODO: Initialize `transaction`:
- >>> transaction = b''
- >>>
- >>> client.rollback(database, transaction)
-
- Args:
- database (str): Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- transaction (bytes): Required. The transaction to roll back.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "rollback" not in self._inner_api_calls:
- self._inner_api_calls[
- "rollback"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.rollback,
- default_retry=self._method_configs["Rollback"].retry,
- default_timeout=self._method_configs["Rollback"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.RollbackRequest(
- database=database, transaction=transaction
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("database", database)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- self._inner_api_calls["rollback"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def run_query(
- self,
- parent,
- structured_query=None,
- transaction=None,
- new_transaction=None,
- read_time=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Runs a query.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> for element in client.run_query(parent):
- ... # process element
- ... pass
-
- Args:
- parent (str): Required. The parent resource name. In the format:
- ``projects/{project_id}/databases/{database_id}/documents`` or
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- For example: ``projects/my-project/databases/my-database/documents`` or
- ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
- structured_query (Union[dict, ~google.cloud.firestore_v1.types.StructuredQuery]): A structured query.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.StructuredQuery`
- transaction (bytes): Reads documents in a transaction.
- new_transaction (Union[dict, ~google.cloud.firestore_v1.types.TransactionOptions]): Starts a new transaction and reads the documents.
- Defaults to a read-only transaction.
- The new transaction ID will be returned as the first response in the
- stream.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.TransactionOptions`
- read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads documents as they were at the given time.
- This may not be older than 60 seconds.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1.types.Timestamp`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- Iterable[~google.cloud.firestore_v1.types.RunQueryResponse].
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "run_query" not in self._inner_api_calls:
- self._inner_api_calls[
- "run_query"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.run_query,
- default_retry=self._method_configs["RunQuery"].retry,
- default_timeout=self._method_configs["RunQuery"].timeout,
- client_info=self._client_info,
- )
-
- # Sanity check: We have some fields which are mutually exclusive;
- # raise ValueError if more than one is sent.
- google.api_core.protobuf_helpers.check_oneof(structured_query=structured_query)
-
- # Sanity check: We have some fields which are mutually exclusive;
- # raise ValueError if more than one is sent.
- google.api_core.protobuf_helpers.check_oneof(
- transaction=transaction,
- new_transaction=new_transaction,
- read_time=read_time,
- )
-
- request = firestore_pb2.RunQueryRequest(
- parent=parent,
- structured_query=structured_query,
- transaction=transaction,
- new_transaction=new_transaction,
- read_time=read_time,
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["run_query"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def write(
- self,
- requests,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Streams batches of document updates and deletes, in order.
-
- EXPERIMENTAL: This method interface might change in the future.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>> request = {'database': database}
- >>>
- >>> requests = [request]
- >>> for element in client.write(requests):
- ... # process element
- ... pass
-
- Args:
- requests (iterator[dict|google.cloud.firestore_v1.proto.firestore_pb2.WriteRequest]): The input objects. If a dict is provided, it must be of the
- same form as the protobuf message :class:`~google.cloud.firestore_v1.types.WriteRequest`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- Iterable[~google.cloud.firestore_v1.types.WriteResponse].
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "write" not in self._inner_api_calls:
- self._inner_api_calls[
- "write"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.write,
- default_retry=self._method_configs["Write"].retry,
- default_timeout=self._method_configs["Write"].timeout,
- client_info=self._client_info,
- )
-
- return self._inner_api_calls["write"](
- requests, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def listen(
- self,
- requests,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Listens to changes.
-
- EXPERIMENTAL: This method interface might change in the future.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>> request = {'database': database}
- >>>
- >>> requests = [request]
- >>> for element in client.listen(requests):
- ... # process element
- ... pass
-
- Args:
- requests (iterator[dict|google.cloud.firestore_v1.proto.firestore_pb2.ListenRequest]): The input objects. If a dict is provided, it must be of the
- same form as the protobuf message :class:`~google.cloud.firestore_v1.types.ListenRequest`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- Iterable[~google.cloud.firestore_v1.types.ListenResponse].
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "listen" not in self._inner_api_calls:
- self._inner_api_calls[
- "listen"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.listen,
- default_retry=self._method_configs["Listen"].retry,
- default_timeout=self._method_configs["Listen"].timeout,
- client_info=self._client_info,
- )
-
- return self._inner_api_calls["listen"](
- requests, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def list_collection_ids(
- self,
- parent,
- page_size=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Lists all the collection IDs underneath a document.
-
- Example:
- >>> from google.cloud import firestore_v1
- >>>
- >>> client = firestore_v1.FirestoreClient()
- >>>
- >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> # Iterate over all results
- >>> for element in client.list_collection_ids(parent):
- ... # process element
- ... pass
- >>>
- >>>
- >>> # Alternatively:
- >>>
- >>> # Iterate over results one page at a time
- >>> for page in client.list_collection_ids(parent).pages:
- ... for element in page:
- ... # process element
- ... pass
-
- Args:
- parent (str): Required. The parent document. In the format:
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- For example:
- ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
- page_size (int): The maximum number of resources contained in the
- underlying API response. If page streaming is performed per-
- resource, this parameter does not affect the return value. If page
- streaming is performed per-page, this determines the maximum number
- of resources in a page.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.api_core.page_iterator.PageIterator` instance.
- An iterable of :class:`str` instances.
- You can also iterate over the pages of the response
- using its `pages` property.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "list_collection_ids" not in self._inner_api_calls:
- self._inner_api_calls[
- "list_collection_ids"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.list_collection_ids,
- default_retry=self._method_configs["ListCollectionIds"].retry,
- default_timeout=self._method_configs["ListCollectionIds"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.ListCollectionIdsRequest(
- parent=parent, page_size=page_size
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- iterator = google.api_core.page_iterator.GRPCIterator(
- client=None,
- method=functools.partial(
- self._inner_api_calls["list_collection_ids"],
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- ),
- request=request,
- items_field="collection_ids",
- request_token_field="page_token",
- response_token_field="next_page_token",
- )
- return iterator
diff --git a/google/cloud/firestore_v1/gapic/firestore_client_config.py b/google/cloud/firestore_v1/gapic/firestore_client_config.py
deleted file mode 100644
index 53f9f267dd..0000000000
--- a/google/cloud/firestore_v1/gapic/firestore_client_config.py
+++ /dev/null
@@ -1,97 +0,0 @@
-config = {
- "interfaces": {
- "google.firestore.v1.Firestore": {
- "retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
- "non_idempotent": [],
- },
- "retry_params": {
- "default": {
- "initial_retry_delay_millis": 100,
- "retry_delay_multiplier": 1.3,
- "max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 60000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 60000,
- "total_timeout_millis": 600000,
- },
- "streaming": {
- "initial_retry_delay_millis": 100,
- "retry_delay_multiplier": 1.3,
- "max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 60000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 60000,
- "total_timeout_millis": 600000,
- },
- },
- "methods": {
- "GetDocument": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "ListDocuments": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "CreateDocument": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "UpdateDocument": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "DeleteDocument": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "BatchGetDocuments": {
- "timeout_millis": 300000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "streaming",
- },
- "BeginTransaction": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "Commit": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "Rollback": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "RunQuery": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "streaming",
- },
- "Write": {
- "timeout_millis": 86400000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "streaming",
- },
- "Listen": {
- "timeout_millis": 86400000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "streaming",
- },
- "ListCollectionIds": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- },
- }
- }
-}
diff --git a/google/cloud/firestore_v1/gapic/transports/__init__.py b/google/cloud/firestore_v1/gapic/transports/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py b/google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py
deleted file mode 100644
index ce730eaacc..0000000000
--- a/google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py
+++ /dev/null
@@ -1,281 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import google.api_core.grpc_helpers
-
-from google.cloud.firestore_v1.proto import firestore_pb2_grpc
-
-
-class FirestoreGrpcTransport(object):
- """gRPC transport class providing stubs for
- google.firestore.v1 Firestore API.
-
- The transport provides access to the raw gRPC stubs,
- which can be used to take advantage of advanced
- features of gRPC.
- """
-
- # The scopes needed to make gRPC calls to all of the methods defined
- # in this service.
- _OAUTH_SCOPES = (
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/datastore",
- )
-
- def __init__(
- self, channel=None, credentials=None, address="firestore.googleapis.com:443"
- ):
- """Instantiate the transport class.
-
- Args:
- channel (grpc.Channel): A ``Channel`` instance through
- which to make calls. This argument is mutually exclusive
- with ``credentials``; providing both will raise an exception.
- credentials (google.auth.credentials.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- address (str): The address where the service is hosted.
- """
- # If both `channel` and `credentials` are specified, raise an
- # exception (channels come with credentials baked in already).
- if channel is not None and credentials is not None:
- raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
- )
-
- # Create the channel.
- if channel is None:
- channel = self.create_channel(
- address=address,
- credentials=credentials,
- options={
- "grpc.max_send_message_length": -1,
- "grpc.max_receive_message_length": -1,
- }.items(),
- )
-
- self._channel = channel
-
- # gRPC uses objects called "stubs" that are bound to the
- # channel and provide a basic method for each RPC.
- self._stubs = {"firestore_stub": firestore_pb2_grpc.FirestoreStub(channel)}
-
- @classmethod
- def create_channel(
- cls, address="firestore.googleapis.com:443", credentials=None, **kwargs
- ):
- """Create and return a gRPC channel object.
-
- Args:
- address (str): The host for the channel to use.
- credentials (~.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- kwargs (dict): Keyword arguments, which are passed to the
- channel creation.
-
- Returns:
- grpc.Channel: A gRPC channel object.
- """
- return google.api_core.grpc_helpers.create_channel(
- address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
- )
-
- @property
- def channel(self):
- """The gRPC channel used by the transport.
-
- Returns:
- grpc.Channel: A gRPC channel object.
- """
- return self._channel
-
- @property
- def get_document(self):
- """Return the gRPC stub for :meth:`FirestoreClient.get_document`.
-
- Gets a single document.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].GetDocument
-
- @property
- def list_documents(self):
- """Return the gRPC stub for :meth:`FirestoreClient.list_documents`.
-
- Lists documents.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].ListDocuments
-
- @property
- def create_document(self):
- """Return the gRPC stub for :meth:`FirestoreClient.create_document`.
-
- Creates a new document.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].CreateDocument
-
- @property
- def update_document(self):
- """Return the gRPC stub for :meth:`FirestoreClient.update_document`.
-
- Updates or inserts a document.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].UpdateDocument
-
- @property
- def delete_document(self):
- """Return the gRPC stub for :meth:`FirestoreClient.delete_document`.
-
- Deletes a document.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].DeleteDocument
-
- @property
- def batch_get_documents(self):
- """Return the gRPC stub for :meth:`FirestoreClient.batch_get_documents`.
-
- Gets multiple documents.
-
- Documents returned by this method are not guaranteed to be returned in the
- same order that they were requested.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].BatchGetDocuments
-
- @property
- def begin_transaction(self):
- """Return the gRPC stub for :meth:`FirestoreClient.begin_transaction`.
-
- Starts a new transaction.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].BeginTransaction
-
- @property
- def commit(self):
- """Return the gRPC stub for :meth:`FirestoreClient.commit`.
-
- Commits a transaction, while optionally updating documents.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].Commit
-
- @property
- def rollback(self):
- """Return the gRPC stub for :meth:`FirestoreClient.rollback`.
-
- Rolls back a transaction.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].Rollback
-
- @property
- def run_query(self):
- """Return the gRPC stub for :meth:`FirestoreClient.run_query`.
-
- Runs a query.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].RunQuery
-
- @property
- def write(self):
- """Return the gRPC stub for :meth:`FirestoreClient.write`.
-
- Streams batches of document updates and deletes, in order.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].Write
-
- @property
- def listen(self):
- """Return the gRPC stub for :meth:`FirestoreClient.listen`.
-
- Listens to changes.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].Listen
-
- @property
- def list_collection_ids(self):
- """Return the gRPC stub for :meth:`FirestoreClient.list_collection_ids`.
-
- Lists all the collection IDs underneath a document.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].ListCollectionIds
diff --git a/google/cloud/firestore_v1/order.py b/google/cloud/firestore_v1/order.py
index d70293a36a..37052f9f57 100644
--- a/google/cloud/firestore_v1/order.py
+++ b/google/cloud/firestore_v1/order.py
@@ -15,6 +15,7 @@
from enum import Enum
from google.cloud.firestore_v1._helpers import decode_value
import math
+from typing import Any
class TypeOrder(Enum):
@@ -31,8 +32,8 @@ class TypeOrder(Enum):
OBJECT = 9
@staticmethod
- def from_value(value):
- v = value.WhichOneof("value_type")
+ def from_value(value) -> Any:
+ v = value._pb.WhichOneof("value_type")
lut = {
"null_value": TypeOrder.NULL,
@@ -49,7 +50,7 @@ def from_value(value):
}
if v not in lut:
- raise ValueError("Could not detect value type for " + v)
+ raise ValueError(f"Could not detect value type for {v}")
return lut[v]
@@ -59,7 +60,7 @@ class Order(object):
"""
@classmethod
- def compare(cls, left, right):
+ def compare(cls, left, right) -> int:
"""
Main comparison function for all Firestore types.
@return -1 is left < right, 0 if left == right, otherwise 1
@@ -73,7 +74,7 @@ def compare(cls, left, right):
return -1
return 1
- value_type = left.WhichOneof("value_type")
+ value_type = left._pb.WhichOneof("value_type")
if value_type == "null_value":
return 0 # nulls are all equal
@@ -98,19 +99,19 @@ def compare(cls, left, right):
elif value_type == "map_value":
return cls.compare_objects(left, right)
else:
- raise ValueError("Unknown ``value_type``", str(value_type))
+ raise ValueError(f"Unknown ``value_type`` {value_type}")
@staticmethod
- def compare_blobs(left, right):
+ def compare_blobs(left, right) -> int:
left_bytes = left.bytes_value
right_bytes = right.bytes_value
return Order._compare_to(left_bytes, right_bytes)
@staticmethod
- def compare_timestamps(left, right):
- left = left.timestamp_value
- right = right.timestamp_value
+ def compare_timestamps(left, right) -> Any:
+ left = left._pb.timestamp_value
+ right = right._pb.timestamp_value
seconds = Order._compare_to(left.seconds or 0, right.seconds or 0)
if seconds != 0:
@@ -119,7 +120,7 @@ def compare_timestamps(left, right):
return Order._compare_to(left.nanos or 0, right.nanos or 0)
@staticmethod
- def compare_geo_points(left, right):
+ def compare_geo_points(left, right) -> Any:
left_value = decode_value(left, None)
right_value = decode_value(right, None)
cmp = (left_value.latitude > right_value.latitude) - (
@@ -133,7 +134,7 @@ def compare_geo_points(left, right):
)
@staticmethod
- def compare_resource_paths(left, right):
+ def compare_resource_paths(left, right) -> int:
left = left.reference_value
right = right.reference_value
@@ -152,7 +153,7 @@ def compare_resource_paths(left, right):
return (left_length > right_length) - (left_length < right_length)
@staticmethod
- def compare_arrays(left, right):
+ def compare_arrays(left, right) -> int:
l_values = left.array_value.values
r_values = right.array_value.values
@@ -165,7 +166,7 @@ def compare_arrays(left, right):
return Order._compare_to(len(l_values), len(r_values))
@staticmethod
- def compare_objects(left, right):
+ def compare_objects(left, right) -> int:
left_fields = left.map_value.fields
right_fields = right.map_value.fields
@@ -183,13 +184,13 @@ def compare_objects(left, right):
return Order._compare_to(len(left_fields), len(right_fields))
@staticmethod
- def compare_numbers(left, right):
+ def compare_numbers(left, right) -> int:
left_value = decode_value(left, None)
right_value = decode_value(right, None)
return Order.compare_doubles(left_value, right_value)
@staticmethod
- def compare_doubles(left, right):
+ def compare_doubles(left, right) -> int:
if math.isnan(left):
if math.isnan(right):
return 0
@@ -200,7 +201,7 @@ def compare_doubles(left, right):
return Order._compare_to(left, right)
@staticmethod
- def _compare_to(left, right):
+ def _compare_to(left, right) -> int:
# We can't just use cmp(left, right) because cmp doesn't exist
# in Python 3, so this is an equivalent suggested by
# https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons
diff --git a/google/cloud/firestore_v1/proto/__init__.py b/google/cloud/firestore_v1/proto/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/google/cloud/firestore_v1/proto/common.proto b/google/cloud/firestore_v1/proto/common.proto
deleted file mode 100644
index 8e2ef27ff2..0000000000
--- a/google/cloud/firestore_v1/proto/common.proto
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.v1;
-
-import "google/protobuf/timestamp.proto";
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.V1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/v1;firestore";
-option java_multiple_files = true;
-option java_outer_classname = "CommonProto";
-option java_package = "com.google.firestore.v1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\V1";
-
-// A set of field paths on a document.
-// Used to restrict a get or update operation on a document to a subset of its
-// fields.
-// This is different from standard field masks, as this is always scoped to a
-// [Document][google.firestore.v1.Document], and takes in account the dynamic nature of [Value][google.firestore.v1.Value].
-message DocumentMask {
- // The list of field paths in the mask. See [Document.fields][google.firestore.v1.Document.fields] for a field
- // path syntax reference.
- repeated string field_paths = 1;
-}
-
-// A precondition on a document, used for conditional operations.
-message Precondition {
- // The type of precondition.
- oneof condition_type {
- // When set to `true`, the target document must exist.
- // When set to `false`, the target document must not exist.
- bool exists = 1;
-
- // When set, the target document must exist and have been last updated at
- // that time.
- google.protobuf.Timestamp update_time = 2;
- }
-}
-
-// Options for creating a new transaction.
-message TransactionOptions {
- // Options for a transaction that can be used to read and write documents.
- message ReadWrite {
- // An optional transaction to retry.
- bytes retry_transaction = 1;
- }
-
- // Options for a transaction that can only be used to read documents.
- message ReadOnly {
- // The consistency mode for this transaction. If not set, defaults to strong
- // consistency.
- oneof consistency_selector {
- // Reads documents at the given time.
- // This may not be older than 60 seconds.
- google.protobuf.Timestamp read_time = 2;
- }
- }
-
- // The mode of the transaction.
- oneof mode {
- // The transaction can only be used for read operations.
- ReadOnly read_only = 2;
-
- // The transaction can be used for both read and write operations.
- ReadWrite read_write = 3;
- }
-}
diff --git a/google/cloud/firestore_v1/proto/common_pb2.py b/google/cloud/firestore_v1/proto/common_pb2.py
deleted file mode 100644
index 3d25c5b80c..0000000000
--- a/google/cloud/firestore_v1/proto/common_pb2.py
+++ /dev/null
@@ -1,454 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1/proto/common.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1/proto/common.proto",
- package="google.firestore.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\027com.google.firestore.v1B\013CommonProtoP\001Z fields = 2;
-
- // Output only. The time at which the document was created.
- //
- // This value increases monotonically when a document is deleted then
- // recreated. It can also be compared to values from other documents and
- // the `read_time` of a query.
- google.protobuf.Timestamp create_time = 3;
-
- // Output only. The time at which the document was last changed.
- //
- // This value is initially set to the `create_time` then increases
- // monotonically with each change to the document. It can also be
- // compared to values from other documents and the `read_time` of a query.
- google.protobuf.Timestamp update_time = 4;
-}
-
-// A message that can hold any of the supported value types.
-message Value {
- // Must have a value set.
- oneof value_type {
- // A null value.
- google.protobuf.NullValue null_value = 11;
-
- // A boolean value.
- bool boolean_value = 1;
-
- // An integer value.
- int64 integer_value = 2;
-
- // A double value.
- double double_value = 3;
-
- // A timestamp value.
- //
- // Precise only to microseconds. When stored, any additional precision is
- // rounded down.
- google.protobuf.Timestamp timestamp_value = 10;
-
- // A string value.
- //
- // The string, represented as UTF-8, must not exceed 1 MiB - 89 bytes.
- // Only the first 1,500 bytes of the UTF-8 representation are considered by
- // queries.
- string string_value = 17;
-
- // A bytes value.
- //
- // Must not exceed 1 MiB - 89 bytes.
- // Only the first 1,500 bytes are considered by queries.
- bytes bytes_value = 18;
-
- // A reference to a document. For example:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- string reference_value = 5;
-
- // A geo point value representing a point on the surface of Earth.
- google.type.LatLng geo_point_value = 8;
-
- // An array value.
- //
- // Cannot directly contain another array value, though can contain an
- // map which contains another array.
- ArrayValue array_value = 9;
-
- // A map value.
- MapValue map_value = 6;
- }
-}
-
-// An array value.
-message ArrayValue {
- // Values in the array.
- repeated Value values = 1;
-}
-
-// A map value.
-message MapValue {
- // The map's fields.
- //
- // The map keys represent field names. Field names matching the regular
- // expression `__.*__` are reserved. Reserved field names are forbidden except
- // in certain documented contexts. The map keys, represented as UTF-8, must
- // not exceed 1,500 bytes and cannot be empty.
- map fields = 1;
-}
diff --git a/google/cloud/firestore_v1/proto/document_pb2.py b/google/cloud/firestore_v1/proto/document_pb2.py
deleted file mode 100644
index 82111a8229..0000000000
--- a/google/cloud/firestore_v1/proto/document_pb2.py
+++ /dev/null
@@ -1,798 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1/proto/document.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1/proto/document.proto",
- package="google.firestore.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\027com.google.firestore.v1B\rDocumentProtoP\001Z labels = 5;
-}
-
-// The response for [Firestore.Write][google.firestore.v1.Firestore.Write].
-message WriteResponse {
- // The ID of the stream.
- // Only set on the first message, when a new stream was created.
- string stream_id = 1;
-
- // A token that represents the position of this response in the stream.
- // This can be used by a client to resume the stream at this point.
- //
- // This field is always set.
- bytes stream_token = 2;
-
- // The result of applying the writes.
- //
- // This i-th write result corresponds to the i-th write in the
- // request.
- repeated WriteResult write_results = 3;
-
- // The time at which the commit occurred. Any read with an equal or greater
- // `read_time` is guaranteed to see the effects of the write.
- google.protobuf.Timestamp commit_time = 4;
-}
-
-// A request for [Firestore.Listen][google.firestore.v1.Firestore.Listen]
-message ListenRequest {
- // Required. The database name. In the format:
- // `projects/{project_id}/databases/{database_id}`.
- string database = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The supported target changes.
- oneof target_change {
- // A target to add to this stream.
- Target add_target = 2;
-
- // The ID of a target to remove from this stream.
- int32 remove_target = 3;
- }
-
- // Labels associated with this target change.
- map labels = 4;
-}
-
-// The response for [Firestore.Listen][google.firestore.v1.Firestore.Listen].
-message ListenResponse {
- // The supported responses.
- oneof response_type {
- // Targets have changed.
- TargetChange target_change = 2;
-
- // A [Document][google.firestore.v1.Document] has changed.
- DocumentChange document_change = 3;
-
- // A [Document][google.firestore.v1.Document] has been deleted.
- DocumentDelete document_delete = 4;
-
- // A [Document][google.firestore.v1.Document] has been removed from a target (because it is no longer
- // relevant to that target).
- DocumentRemove document_remove = 6;
-
- // A filter to apply to the set of documents previously returned for the
- // given target.
- //
- // Returned when documents may have been removed from the given target, but
- // the exact documents are unknown.
- ExistenceFilter filter = 5;
- }
-}
-
-// A specification of a set of documents to listen to.
-message Target {
- // A target specified by a set of documents names.
- message DocumentsTarget {
- // The names of the documents to retrieve. In the format:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- // The request will fail if any of the document is not a child resource of
- // the given `database`. Duplicate names will be elided.
- repeated string documents = 2;
- }
-
- // A target specified by a query.
- message QueryTarget {
- // The parent resource name. In the format:
- // `projects/{project_id}/databases/{database_id}/documents` or
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- // For example:
- // `projects/my-project/databases/my-database/documents` or
- // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
- string parent = 1;
-
- // The query to run.
- oneof query_type {
- // A structured query.
- StructuredQuery structured_query = 2;
- }
- }
-
- // The type of target to listen to.
- oneof target_type {
- // A target specified by a query.
- QueryTarget query = 2;
-
- // A target specified by a set of document names.
- DocumentsTarget documents = 3;
- }
-
- // When to start listening.
- //
- // If not specified, all matching Documents are returned before any
- // subsequent changes.
- oneof resume_type {
- // A resume token from a prior [TargetChange][google.firestore.v1.TargetChange] for an identical target.
- //
- // Using a resume token with a different target is unsupported and may fail.
- bytes resume_token = 4;
-
- // Start listening after a specific `read_time`.
- //
- // The client must know the state of matching documents at this time.
- google.protobuf.Timestamp read_time = 11;
- }
-
- // The target ID that identifies the target on the stream. Must be a positive
- // number and non-zero.
- int32 target_id = 5;
-
- // If the target should be removed once it is current and consistent.
- bool once = 6;
-}
-
-// Targets being watched have changed.
-message TargetChange {
- // The type of change.
- enum TargetChangeType {
- // No change has occurred. Used only to send an updated `resume_token`.
- NO_CHANGE = 0;
-
- // The targets have been added.
- ADD = 1;
-
- // The targets have been removed.
- REMOVE = 2;
-
- // The targets reflect all changes committed before the targets were added
- // to the stream.
- //
- // This will be sent after or with a `read_time` that is greater than or
- // equal to the time at which the targets were added.
- //
- // Listeners can wait for this change if read-after-write semantics
- // are desired.
- CURRENT = 3;
-
- // The targets have been reset, and a new initial state for the targets
- // will be returned in subsequent changes.
- //
- // After the initial state is complete, `CURRENT` will be returned even
- // if the target was previously indicated to be `CURRENT`.
- RESET = 4;
- }
-
- // The type of change that occurred.
- TargetChangeType target_change_type = 1;
-
- // The target IDs of targets that have changed.
- //
- // If empty, the change applies to all targets.
- //
- // The order of the target IDs is not defined.
- repeated int32 target_ids = 2;
-
- // The error that resulted in this change, if applicable.
- google.rpc.Status cause = 3;
-
- // A token that can be used to resume the stream for the given `target_ids`,
- // or all targets if `target_ids` is empty.
- //
- // Not set on every target change.
- bytes resume_token = 4;
-
- // The consistent `read_time` for the given `target_ids` (omitted when the
- // target_ids are not at a consistent snapshot).
- //
- // The stream is guaranteed to send a `read_time` with `target_ids` empty
- // whenever the entire stream reaches a new consistent snapshot. ADD,
- // CURRENT, and RESET messages are guaranteed to (eventually) result in a
- // new consistent snapshot (while NO_CHANGE and REMOVE messages are not).
- //
- // For a given stream, `read_time` is guaranteed to be monotonically
- // increasing.
- google.protobuf.Timestamp read_time = 6;
-}
-
-// The request for [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].
-message ListCollectionIdsRequest {
- // Required. The parent document. In the format:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- // For example:
- // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
- string parent = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The maximum number of results to return.
- int32 page_size = 2;
-
- // A page token. Must be a value from
- // [ListCollectionIdsResponse][google.firestore.v1.ListCollectionIdsResponse].
- string page_token = 3;
-}
-
-// The response from [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].
-message ListCollectionIdsResponse {
- // The collection ids.
- repeated string collection_ids = 1;
-
- // A page token that may be used to continue the list.
- string next_page_token = 2;
-}
diff --git a/google/cloud/firestore_v1/proto/firestore_pb2.py b/google/cloud/firestore_v1/proto/firestore_pb2.py
deleted file mode 100644
index 06e39be5b1..0000000000
--- a/google/cloud/firestore_v1/proto/firestore_pb2.py
+++ /dev/null
@@ -1,3806 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1/proto/firestore.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-from google.api import client_pb2 as google_dot_api_dot_client__pb2
-from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
-from google.cloud.firestore_v1.proto import (
- common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2,
-)
-from google.cloud.firestore_v1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
-)
-from google.cloud.firestore_v1.proto import (
- query_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2,
-)
-from google.cloud.firestore_v1.proto import (
- write_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_write__pb2,
-)
-from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1/proto/firestore.proto",
- package="google.firestore.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\027com.google.firestore.v1B\016FirestoreProtoP\001Z\n\x06labels\x18\x04 \x03(\x0b\x32..google.firestore.v1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change"\xd5\x02\n\x0eListenResponse\x12:\n\rtarget_change\x18\x02 \x01(\x0b\x32!.google.firestore.v1.TargetChangeH\x00\x12>\n\x0f\x64ocument_change\x18\x03 \x01(\x0b\x32#.google.firestore.v1.DocumentChangeH\x00\x12>\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32#.google.firestore.v1.DocumentDeleteH\x00\x12>\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32#.google.firestore.v1.DocumentRemoveH\x00\x12\x36\n\x06\x66ilter\x18\x05 \x01(\x0b\x32$.google.firestore.v1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type"\xa1\x03\n\x06Target\x12\x38\n\x05query\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.Target.QueryTargetH\x00\x12@\n\tdocuments\x18\x03 \x01(\x0b\x32+.google.firestore.v1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1am\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12@\n\x10structured_query\x18\x02 \x01(\x0b\x32$.google.firestore.v1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type"\xaa\x02\n\x0cTargetChange\x12N\n\x12target_change_type\x18\x01 \x01(\x0e\x32\x32.google.firestore.v1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04"V\n\x18ListCollectionIdsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xd7\x13\n\tFirestore\x12\x8f\x01\n\x0bGetDocument\x12\'.google.firestore.v1.GetDocumentRequest\x1a\x1d.google.firestore.v1.Document"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1/{name=projects/*/databases/*/documents/*/**}\x12\xb2\x01\n\rListDocuments\x12).google.firestore.v1.ListDocumentsRequest\x1a*.google.firestore.v1.ListDocumentsResponse"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xaf\x01\n\x0e\x43reateDocument\x12*.google.firestore.v1.CreateDocumentRequest\x1a\x1d.google.firestore.v1.Document"R\x82\xd3\xe4\x93\x02L"@/v1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\x08\x64ocument\x12\xbf\x01\n\x0eUpdateDocument\x12*.google.firestore.v1.UpdateDocumentRequest\x1a\x1d.google.firestore.v1.Document"b\x82\xd3\xe4\x93\x02\x45\x32\x39/v1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\xda\x41\x14\x64ocument,update_mask\x12\x95\x01\n\x0e\x44\x65leteDocument\x12*.google.firestore.v1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty"?\x82\xd3\xe4\x93\x02\x32*0/v1/{name=projects/*/databases/*/documents/*/**}\xda\x41\x04name\x12\xb9\x01\n\x11\x42\x61tchGetDocuments\x12-.google.firestore.v1.BatchGetDocumentsRequest\x1a..google.firestore.v1.BatchGetDocumentsResponse"C\x82\xd3\xe4\x93\x02="8/v1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xc7\x01\n\x10\x42\x65ginTransaction\x12,.google.firestore.v1.BeginTransactionRequest\x1a-.google.firestore.v1.BeginTransactionResponse"V\
x82\xd3\xe4\x93\x02\x45"@/v1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\xda\x41\x08\x64\x61tabase\x12\xa6\x01\n\x06\x43ommit\x12".google.firestore.v1.CommitRequest\x1a#.google.firestore.v1.CommitResponse"S\x82\xd3\xe4\x93\x02;"6/v1/{database=projects/*/databases/*}/documents:commit:\x01*\xda\x41\x0f\x64\x61tabase,writes\x12\xa4\x01\n\x08Rollback\x12$.google.firestore.v1.RollbackRequest\x1a\x16.google.protobuf.Empty"Z\x82\xd3\xe4\x93\x02="8/v1/{database=projects/*/databases/*}/documents:rollback:\x01*\xda\x41\x14\x64\x61tabase,transaction\x12\xdf\x01\n\x08RunQuery\x12$.google.firestore.v1.RunQueryRequest\x1a%.google.firestore.v1.RunQueryResponse"\x83\x01\x82\xd3\xe4\x93\x02}"6/v1/{parent=projects/*/databases/*/documents}:runQuery:\x01*Z@";/v1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\x94\x01\n\x05Write\x12!.google.firestore.v1.WriteRequest\x1a".google.firestore.v1.WriteResponse"@\x82\xd3\xe4\x93\x02:"5/v1/{database=projects/*/databases/*}/documents:write:\x01*(\x01\x30\x01\x12\x98\x01\n\x06Listen\x12".google.firestore.v1.ListenRequest\x1a#.google.firestore.v1.ListenResponse"A\x82\xd3\xe4\x93\x02;"6/v1/{database=projects/*/databases/*}/documents:listen:\x01*(\x01\x30\x01\x12\x94\x02\n\x11ListCollectionIds\x12-.google.firestore.v1.ListCollectionIdsRequest\x1a..google.firestore.v1.ListCollectionIdsResponse"\x9f\x01\x82\xd3\xe4\x93\x02\x8f\x01"?/v1/{parent=projects/*/databases/*/documents}:listCollectionIds:\x01*ZI"D/v1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\x01*\xda\x41\x06parent\x1av\xca\x41\x18\x66irestore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\xa8\x01\n\x17\x63om.google.firestore.v1B\x0e\x46irestoreProtoP\x01Z 1` becomes
- // `SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__`
- repeated Order order_by = 4;
-
- // A starting point for the query results.
- Cursor start_at = 7;
-
- // A end point for the query results.
- Cursor end_at = 8;
-
- // The number of results to skip.
- //
- // Applies before limit, but after all other constraints. Must be >= 0 if
- // specified.
- int32 offset = 6;
-
- // The maximum number of results to return.
- //
- // Applies after all other constraints.
- // Must be >= 0 if specified.
- google.protobuf.Int32Value limit = 5;
-}
-
-// A position in a query result set.
-message Cursor {
- // The values that represent a position, in the order they appear in
- // the order by clause of a query.
- //
- // Can contain fewer values than specified in the order by clause.
- repeated Value values = 1;
-
- // If the position is just before or just after the given values, relative
- // to the sort order defined by the query.
- bool before = 2;
-}
diff --git a/google/cloud/firestore_v1/proto/query_pb2.py b/google/cloud/firestore_v1/proto/query_pb2.py
deleted file mode 100644
index 6e1982629d..0000000000
--- a/google/cloud/firestore_v1/proto/query_pb2.py
+++ /dev/null
@@ -1,1200 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1/proto/query.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.cloud.firestore_v1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
-)
-from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1/proto/query.proto",
- package="google.firestore.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\027com.google.firestore.v1B\nQueryProtoP\001Z 1``
- becomes ``SELECT * FROM Foo WHERE A > 1 ORDER BY A,
- __name__``
- start_at:
- A starting point for the query results.
- end_at:
- A end point for the query results.
- offset:
- The number of results to skip. Applies before limit, but
- after all other constraints. Must be >= 0 if specified.
- limit:
- The maximum number of results to return. Applies after all
- other constraints. Must be >= 0 if specified.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1.StructuredQuery)
- ),
-)
-_sym_db.RegisterMessage(StructuredQuery)
-_sym_db.RegisterMessage(StructuredQuery.CollectionSelector)
-_sym_db.RegisterMessage(StructuredQuery.Filter)
-_sym_db.RegisterMessage(StructuredQuery.CompositeFilter)
-_sym_db.RegisterMessage(StructuredQuery.FieldFilter)
-_sym_db.RegisterMessage(StructuredQuery.UnaryFilter)
-_sym_db.RegisterMessage(StructuredQuery.Order)
-_sym_db.RegisterMessage(StructuredQuery.FieldReference)
-_sym_db.RegisterMessage(StructuredQuery.Projection)
-
-Cursor = _reflection.GeneratedProtocolMessageType(
- "Cursor",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CURSOR,
- __module__="google.cloud.firestore_v1.proto.query_pb2",
- __doc__="""A position in a query result set.
-
-
- Attributes:
- values:
- The values that represent a position, in the order they appear
- in the order by clause of a query. Can contain fewer values
- than specified in the order by clause.
- before:
- If the position is just before or just after the given values,
- relative to the sort order defined by the query.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1.Cursor)
- ),
-)
-_sym_db.RegisterMessage(Cursor)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1/proto/query_pb2_grpc.py b/google/cloud/firestore_v1/proto/query_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_v1/proto/query_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_v1/proto/test_v1_pb2.py b/google/cloud/firestore_v1/proto/test_v1_pb2.py
deleted file mode 100644
index 336bab9484..0000000000
--- a/google/cloud/firestore_v1/proto/test_v1_pb2.py
+++ /dev/null
@@ -1,2190 +0,0 @@
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: test_v1.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.cloud.firestore_v1.proto import (
- common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2,
-)
-from google.cloud.firestore_v1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
-)
-from google.cloud.firestore_v1.proto import (
- firestore_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2,
-)
-from google.cloud.firestore_v1.proto import (
- query_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2,
-)
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="test_v1.proto",
- package="tests.v1",
- syntax="proto3",
- serialized_pb=_b(
- '\n\rtest_v1.proto\x12\x08tests.v1\x1a,google/cloud/firestore_v1/proto/common.proto\x1a.google/cloud/firestore_v1/proto/document.proto\x1a/google/cloud/firestore_v1/proto/firestore.proto\x1a+google/cloud/firestore_v1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"*\n\tTestSuite\x12\x1d\n\x05tests\x18\x01 \x03(\x0b\x32\x0e.tests.v1.Test"\xe0\x02\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12 \n\x03get\x18\x02 \x01(\x0b\x32\x11.tests.v1.GetTestH\x00\x12&\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x14.tests.v1.CreateTestH\x00\x12 \n\x03set\x18\x04 \x01(\x0b\x32\x11.tests.v1.SetTestH\x00\x12&\n\x06update\x18\x05 \x01(\x0b\x32\x14.tests.v1.UpdateTestH\x00\x12\x31\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x19.tests.v1.UpdatePathsTestH\x00\x12&\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x14.tests.v1.DeleteTestH\x00\x12$\n\x05query\x18\x08 \x01(\x0b\x32\x13.tests.v1.QueryTestH\x00\x12&\n\x06listen\x18\t \x01(\x0b\x32\x14.tests.v1.ListenTestH\x00\x42\x06\n\x04test"Y\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x38\n\x07request\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.GetDocumentRequest"|\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\x9e\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12#\n\x06option\x18\x02 \x01(\x0b\x32\x13.tests.v1.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xb5\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 
\x01(\x08"\xe6\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12(\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x13.tests.v1.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x33\n\x07request\x18\x05 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xa2\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"=\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12#\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x13.tests.v1.FieldPath"\x88\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12!\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\x10.tests.v1.Clause\x12\x33\n\x05query\x18\x03 \x01(\x0b\x32$.google.firestore.v1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xbd\x02\n\x06\x43lause\x12"\n\x06select\x18\x01 \x01(\x0b\x32\x10.tests.v1.SelectH\x00\x12 \n\x05where\x18\x02 \x01(\x0b\x32\x0f.tests.v1.WhereH\x00\x12%\n\x08order_by\x18\x03 \x01(\x0b\x32\x11.tests.v1.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12$\n\x08start_at\x18\x06 \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x12\'\n\x0bstart_after\x18\x07 \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x12"\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x12&\n\nend_before\x18\t \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x42\x08\n\x06\x63lause"-\n\x06Select\x12#\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x13.tests.v1.FieldPath"J\n\x05Where\x12!\n\x04path\x18\x01 \x01(\x0b\x32\x13.tests.v1.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t"?\n\x07OrderBy\x12!\n\x04path\x18\x01 \x01(\x0b\x32\x13.tests.v1.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"J\n\x06\x43ursor\x12+\n\x0c\x64oc_snapshot\x18\x01 
\x01(\x0b\x32\x15.tests.v1.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"}\n\nListenTest\x12\x36\n\tresponses\x18\x01 \x03(\x0b\x32#.google.firestore.v1.ListenResponse\x12%\n\tsnapshots\x18\x02 \x03(\x0b\x32\x12.tests.v1.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\x8c\x01\n\x08Snapshot\x12+\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\x1d.google.firestore.v1.Document\x12$\n\x07\x63hanges\x18\x02 \x03(\x0b\x32\x13.tests.v1.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xc9\x01\n\tDocChange\x12&\n\x04kind\x18\x01 \x01(\x0e\x32\x18.tests.v1.DocChange.Kind\x12*\n\x03\x64oc\x18\x02 \x01(\x0b\x32\x1d.google.firestore.v1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42x\n&com.google.cloud.firestore.conformance\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3'
- ),
- dependencies=[
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- ],
-)
-
-
-_DOCCHANGE_KIND = _descriptor.EnumDescriptor(
- name="Kind",
- full_name="tests.v1.DocChange.Kind",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="KIND_UNSPECIFIED", index=0, number=0, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="ADDED", index=1, number=1, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="REMOVED", index=2, number=2, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="MODIFIED", index=3, number=3, options=None, type=None
- ),
- ],
- containing_type=None,
- options=None,
- serialized_start=2875,
- serialized_end=2941,
-)
-_sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND)
-
-
-_TESTSUITE = _descriptor.Descriptor(
- name="TestSuite",
- full_name="tests.v1.TestSuite",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="tests",
- full_name="tests.v1.TestSuite.tests",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=248,
- serialized_end=290,
-)
-
-
-_TEST = _descriptor.Descriptor(
- name="Test",
- full_name="tests.v1.Test",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="description",
- full_name="tests.v1.Test.description",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="get",
- full_name="tests.v1.Test.get",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="create",
- full_name="tests.v1.Test.create",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="set",
- full_name="tests.v1.Test.set",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update",
- full_name="tests.v1.Test.update",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update_paths",
- full_name="tests.v1.Test.update_paths",
- index=5,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="delete",
- full_name="tests.v1.Test.delete",
- index=6,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="query",
- full_name="tests.v1.Test.query",
- index=7,
- number=8,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="listen",
- full_name="tests.v1.Test.listen",
- index=8,
- number=9,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="test",
- full_name="tests.v1.Test.test",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=293,
- serialized_end=645,
-)
-
-
-_GETTEST = _descriptor.Descriptor(
- name="GetTest",
- full_name="tests.v1.GetTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1.GetTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1.GetTest.request",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=647,
- serialized_end=736,
-)
-
-
-_CREATETEST = _descriptor.Descriptor(
- name="CreateTest",
- full_name="tests.v1.CreateTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1.CreateTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="tests.v1.CreateTest.json_data",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1.CreateTest.request",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1.CreateTest.is_error",
- index=3,
- number=4,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=738,
- serialized_end=862,
-)
-
-
-_SETTEST = _descriptor.Descriptor(
- name="SetTest",
- full_name="tests.v1.SetTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1.SetTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="option",
- full_name="tests.v1.SetTest.option",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="tests.v1.SetTest.json_data",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1.SetTest.request",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1.SetTest.is_error",
- index=4,
- number=5,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=865,
- serialized_end=1023,
-)
-
-
-_UPDATETEST = _descriptor.Descriptor(
- name="UpdateTest",
- full_name="tests.v1.UpdateTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1.UpdateTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="precondition",
- full_name="tests.v1.UpdateTest.precondition",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="tests.v1.UpdateTest.json_data",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1.UpdateTest.request",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1.UpdateTest.is_error",
- index=4,
- number=5,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1026,
- serialized_end=1207,
-)
-
-
-_UPDATEPATHSTEST = _descriptor.Descriptor(
- name="UpdatePathsTest",
- full_name="tests.v1.UpdatePathsTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1.UpdatePathsTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="precondition",
- full_name="tests.v1.UpdatePathsTest.precondition",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="field_paths",
- full_name="tests.v1.UpdatePathsTest.field_paths",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_values",
- full_name="tests.v1.UpdatePathsTest.json_values",
- index=3,
- number=4,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1.UpdatePathsTest.request",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1.UpdatePathsTest.is_error",
- index=5,
- number=6,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1210,
- serialized_end=1440,
-)
-
-
-_DELETETEST = _descriptor.Descriptor(
- name="DeleteTest",
- full_name="tests.v1.DeleteTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1.DeleteTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="precondition",
- full_name="tests.v1.DeleteTest.precondition",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1.DeleteTest.request",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1.DeleteTest.is_error",
- index=3,
- number=4,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1443,
- serialized_end=1605,
-)
-
-
-_SETOPTION = _descriptor.Descriptor(
- name="SetOption",
- full_name="tests.v1.SetOption",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="all",
- full_name="tests.v1.SetOption.all",
- index=0,
- number=1,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="tests.v1.SetOption.fields",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1607,
- serialized_end=1668,
-)
-
-
-_QUERYTEST = _descriptor.Descriptor(
- name="QueryTest",
- full_name="tests.v1.QueryTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="coll_path",
- full_name="tests.v1.QueryTest.coll_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="clauses",
- full_name="tests.v1.QueryTest.clauses",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="query",
- full_name="tests.v1.QueryTest.query",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1.QueryTest.is_error",
- index=3,
- number=4,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1671,
- serialized_end=1807,
-)
-
-
-_CLAUSE = _descriptor.Descriptor(
- name="Clause",
- full_name="tests.v1.Clause",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="select",
- full_name="tests.v1.Clause.select",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="where",
- full_name="tests.v1.Clause.where",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="order_by",
- full_name="tests.v1.Clause.order_by",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="offset",
- full_name="tests.v1.Clause.offset",
- index=3,
- number=4,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="limit",
- full_name="tests.v1.Clause.limit",
- index=4,
- number=5,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="start_at",
- full_name="tests.v1.Clause.start_at",
- index=5,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="start_after",
- full_name="tests.v1.Clause.start_after",
- index=6,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_at",
- full_name="tests.v1.Clause.end_at",
- index=7,
- number=8,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_before",
- full_name="tests.v1.Clause.end_before",
- index=8,
- number=9,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="clause",
- full_name="tests.v1.Clause.clause",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=1810,
- serialized_end=2127,
-)
-
-
-_SELECT = _descriptor.Descriptor(
- name="Select",
- full_name="tests.v1.Select",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="tests.v1.Select.fields",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2129,
- serialized_end=2174,
-)
-
-
-_WHERE = _descriptor.Descriptor(
- name="Where",
- full_name="tests.v1.Where",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="path",
- full_name="tests.v1.Where.path",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="op",
- full_name="tests.v1.Where.op",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_value",
- full_name="tests.v1.Where.json_value",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2176,
- serialized_end=2250,
-)
-
-
-_ORDERBY = _descriptor.Descriptor(
- name="OrderBy",
- full_name="tests.v1.OrderBy",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="path",
- full_name="tests.v1.OrderBy.path",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="direction",
- full_name="tests.v1.OrderBy.direction",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2252,
- serialized_end=2315,
-)
-
-
-_CURSOR = _descriptor.Descriptor(
- name="Cursor",
- full_name="tests.v1.Cursor",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_snapshot",
- full_name="tests.v1.Cursor.doc_snapshot",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_values",
- full_name="tests.v1.Cursor.json_values",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2317,
- serialized_end=2391,
-)
-
-
-_DOCSNAPSHOT = _descriptor.Descriptor(
- name="DocSnapshot",
- full_name="tests.v1.DocSnapshot",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="path",
- full_name="tests.v1.DocSnapshot.path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="tests.v1.DocSnapshot.json_data",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2393,
- serialized_end=2439,
-)
-
-
-_FIELDPATH = _descriptor.Descriptor(
- name="FieldPath",
- full_name="tests.v1.FieldPath",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field",
- full_name="tests.v1.FieldPath.field",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2441,
- serialized_end=2467,
-)
-
-
-_LISTENTEST = _descriptor.Descriptor(
- name="ListenTest",
- full_name="tests.v1.ListenTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="responses",
- full_name="tests.v1.ListenTest.responses",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="snapshots",
- full_name="tests.v1.ListenTest.snapshots",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1.ListenTest.is_error",
- index=2,
- number=3,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2469,
- serialized_end=2594,
-)
-
-
-_SNAPSHOT = _descriptor.Descriptor(
- name="Snapshot",
- full_name="tests.v1.Snapshot",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="docs",
- full_name="tests.v1.Snapshot.docs",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="changes",
- full_name="tests.v1.Snapshot.changes",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="tests.v1.Snapshot.read_time",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2597,
- serialized_end=2737,
-)
-
-
-_DOCCHANGE = _descriptor.Descriptor(
- name="DocChange",
- full_name="tests.v1.DocChange",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="kind",
- full_name="tests.v1.DocChange.kind",
- index=0,
- number=1,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="doc",
- full_name="tests.v1.DocChange.doc",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="old_index",
- full_name="tests.v1.DocChange.old_index",
- index=2,
- number=3,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="new_index",
- full_name="tests.v1.DocChange.new_index",
- index=3,
- number=4,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_DOCCHANGE_KIND],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2740,
- serialized_end=2941,
-)
-
-_TESTSUITE.fields_by_name["tests"].message_type = _TEST
-_TEST.fields_by_name["get"].message_type = _GETTEST
-_TEST.fields_by_name["create"].message_type = _CREATETEST
-_TEST.fields_by_name["set"].message_type = _SETTEST
-_TEST.fields_by_name["update"].message_type = _UPDATETEST
-_TEST.fields_by_name["update_paths"].message_type = _UPDATEPATHSTEST
-_TEST.fields_by_name["delete"].message_type = _DELETETEST
-_TEST.fields_by_name["query"].message_type = _QUERYTEST
-_TEST.fields_by_name["listen"].message_type = _LISTENTEST
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["get"])
-_TEST.fields_by_name["get"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["create"])
-_TEST.fields_by_name["create"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["set"])
-_TEST.fields_by_name["set"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update"])
-_TEST.fields_by_name["update"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update_paths"])
-_TEST.fields_by_name["update_paths"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["delete"])
-_TEST.fields_by_name["delete"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["query"])
-_TEST.fields_by_name["query"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["listen"])
-_TEST.fields_by_name["listen"].containing_oneof = _TEST.oneofs_by_name["test"]
-_GETTEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST
-)
-_CREATETEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_SETTEST.fields_by_name["option"].message_type = _SETOPTION
-_SETTEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_UPDATETEST.fields_by_name[
- "precondition"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_UPDATETEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_UPDATEPATHSTEST.fields_by_name[
- "precondition"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_UPDATEPATHSTEST.fields_by_name["field_paths"].message_type = _FIELDPATH
-_UPDATEPATHSTEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_DELETETEST.fields_by_name[
- "precondition"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_DELETETEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_SETOPTION.fields_by_name["fields"].message_type = _FIELDPATH
-_QUERYTEST.fields_by_name["clauses"].message_type = _CLAUSE
-_QUERYTEST.fields_by_name[
- "query"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
-)
-_CLAUSE.fields_by_name["select"].message_type = _SELECT
-_CLAUSE.fields_by_name["where"].message_type = _WHERE
-_CLAUSE.fields_by_name["order_by"].message_type = _ORDERBY
-_CLAUSE.fields_by_name["start_at"].message_type = _CURSOR
-_CLAUSE.fields_by_name["start_after"].message_type = _CURSOR
-_CLAUSE.fields_by_name["end_at"].message_type = _CURSOR
-_CLAUSE.fields_by_name["end_before"].message_type = _CURSOR
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["select"])
-_CLAUSE.fields_by_name["select"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["where"])
-_CLAUSE.fields_by_name["where"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["order_by"])
-_CLAUSE.fields_by_name["order_by"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["offset"])
-_CLAUSE.fields_by_name["offset"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["limit"])
-_CLAUSE.fields_by_name["limit"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_at"])
-_CLAUSE.fields_by_name["start_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_after"])
-_CLAUSE.fields_by_name["start_after"].containing_oneof = _CLAUSE.oneofs_by_name[
- "clause"
-]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_at"])
-_CLAUSE.fields_by_name["end_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_before"])
-_CLAUSE.fields_by_name["end_before"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_SELECT.fields_by_name["fields"].message_type = _FIELDPATH
-_WHERE.fields_by_name["path"].message_type = _FIELDPATH
-_ORDERBY.fields_by_name["path"].message_type = _FIELDPATH
-_CURSOR.fields_by_name["doc_snapshot"].message_type = _DOCSNAPSHOT
-_LISTENTEST.fields_by_name[
- "responses"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._LISTENRESPONSE
-)
-_LISTENTEST.fields_by_name["snapshots"].message_type = _SNAPSHOT
-_SNAPSHOT.fields_by_name[
- "docs"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_SNAPSHOT.fields_by_name["changes"].message_type = _DOCCHANGE
-_SNAPSHOT.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_DOCCHANGE.fields_by_name["kind"].enum_type = _DOCCHANGE_KIND
-_DOCCHANGE.fields_by_name[
- "doc"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_DOCCHANGE_KIND.containing_type = _DOCCHANGE
-DESCRIPTOR.message_types_by_name["TestSuite"] = _TESTSUITE
-DESCRIPTOR.message_types_by_name["Test"] = _TEST
-DESCRIPTOR.message_types_by_name["GetTest"] = _GETTEST
-DESCRIPTOR.message_types_by_name["CreateTest"] = _CREATETEST
-DESCRIPTOR.message_types_by_name["SetTest"] = _SETTEST
-DESCRIPTOR.message_types_by_name["UpdateTest"] = _UPDATETEST
-DESCRIPTOR.message_types_by_name["UpdatePathsTest"] = _UPDATEPATHSTEST
-DESCRIPTOR.message_types_by_name["DeleteTest"] = _DELETETEST
-DESCRIPTOR.message_types_by_name["SetOption"] = _SETOPTION
-DESCRIPTOR.message_types_by_name["QueryTest"] = _QUERYTEST
-DESCRIPTOR.message_types_by_name["Clause"] = _CLAUSE
-DESCRIPTOR.message_types_by_name["Select"] = _SELECT
-DESCRIPTOR.message_types_by_name["Where"] = _WHERE
-DESCRIPTOR.message_types_by_name["OrderBy"] = _ORDERBY
-DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR
-DESCRIPTOR.message_types_by_name["DocSnapshot"] = _DOCSNAPSHOT
-DESCRIPTOR.message_types_by_name["FieldPath"] = _FIELDPATH
-DESCRIPTOR.message_types_by_name["ListenTest"] = _LISTENTEST
-DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT
-DESCRIPTOR.message_types_by_name["DocChange"] = _DOCCHANGE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-TestSuite = _reflection.GeneratedProtocolMessageType(
- "TestSuite",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TESTSUITE,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.TestSuite)
- ),
-)
-_sym_db.RegisterMessage(TestSuite)
-
-Test = _reflection.GeneratedProtocolMessageType(
- "Test",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TEST,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.Test)
- ),
-)
-_sym_db.RegisterMessage(Test)
-
-GetTest = _reflection.GeneratedProtocolMessageType(
- "GetTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GETTEST,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.GetTest)
- ),
-)
-_sym_db.RegisterMessage(GetTest)
-
-CreateTest = _reflection.GeneratedProtocolMessageType(
- "CreateTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CREATETEST,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.CreateTest)
- ),
-)
-_sym_db.RegisterMessage(CreateTest)
-
-SetTest = _reflection.GeneratedProtocolMessageType(
- "SetTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SETTEST,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.SetTest)
- ),
-)
-_sym_db.RegisterMessage(SetTest)
-
-UpdateTest = _reflection.GeneratedProtocolMessageType(
- "UpdateTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPDATETEST,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.UpdateTest)
- ),
-)
-_sym_db.RegisterMessage(UpdateTest)
-
-UpdatePathsTest = _reflection.GeneratedProtocolMessageType(
- "UpdatePathsTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPDATEPATHSTEST,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.UpdatePathsTest)
- ),
-)
-_sym_db.RegisterMessage(UpdatePathsTest)
-
-DeleteTest = _reflection.GeneratedProtocolMessageType(
- "DeleteTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DELETETEST,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.DeleteTest)
- ),
-)
-_sym_db.RegisterMessage(DeleteTest)
-
-SetOption = _reflection.GeneratedProtocolMessageType(
- "SetOption",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SETOPTION,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.SetOption)
- ),
-)
-_sym_db.RegisterMessage(SetOption)
-
-QueryTest = _reflection.GeneratedProtocolMessageType(
- "QueryTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_QUERYTEST,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.QueryTest)
- ),
-)
-_sym_db.RegisterMessage(QueryTest)
-
-Clause = _reflection.GeneratedProtocolMessageType(
- "Clause",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CLAUSE,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.Clause)
- ),
-)
-_sym_db.RegisterMessage(Clause)
-
-Select = _reflection.GeneratedProtocolMessageType(
- "Select",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SELECT,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.Select)
- ),
-)
-_sym_db.RegisterMessage(Select)
-
-Where = _reflection.GeneratedProtocolMessageType(
- "Where",
- (_message.Message,),
- dict(
- DESCRIPTOR=_WHERE,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.Where)
- ),
-)
-_sym_db.RegisterMessage(Where)
-
-OrderBy = _reflection.GeneratedProtocolMessageType(
- "OrderBy",
- (_message.Message,),
- dict(
- DESCRIPTOR=_ORDERBY,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.OrderBy)
- ),
-)
-_sym_db.RegisterMessage(OrderBy)
-
-Cursor = _reflection.GeneratedProtocolMessageType(
- "Cursor",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CURSOR,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.Cursor)
- ),
-)
-_sym_db.RegisterMessage(Cursor)
-
-DocSnapshot = _reflection.GeneratedProtocolMessageType(
- "DocSnapshot",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCSNAPSHOT,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.DocSnapshot)
- ),
-)
-_sym_db.RegisterMessage(DocSnapshot)
-
-FieldPath = _reflection.GeneratedProtocolMessageType(
- "FieldPath",
- (_message.Message,),
- dict(
- DESCRIPTOR=_FIELDPATH,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.FieldPath)
- ),
-)
-_sym_db.RegisterMessage(FieldPath)
-
-ListenTest = _reflection.GeneratedProtocolMessageType(
- "ListenTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTENTEST,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.ListenTest)
- ),
-)
-_sym_db.RegisterMessage(ListenTest)
-
-Snapshot = _reflection.GeneratedProtocolMessageType(
- "Snapshot",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SNAPSHOT,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.Snapshot)
- ),
-)
-_sym_db.RegisterMessage(Snapshot)
-
-DocChange = _reflection.GeneratedProtocolMessageType(
- "DocChange",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCCHANGE,
- __module__="test_v1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1.DocChange)
- ),
-)
-_sym_db.RegisterMessage(DocChange)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(
- descriptor_pb2.FileOptions(),
- _b(
- '\n&com.google.cloud.firestore.conformance\252\002"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance'
- ),
-)
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1/proto/tests_pb2.py b/google/cloud/firestore_v1/proto/tests_pb2.py
deleted file mode 100644
index 126887881e..0000000000
--- a/google/cloud/firestore_v1/proto/tests_pb2.py
+++ /dev/null
@@ -1,2208 +0,0 @@
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1/proto/tests.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.cloud.firestore_v1.proto import (
- common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2,
-)
-from google.cloud.firestore_v1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
-)
-from google.cloud.firestore_v1.proto import (
- firestore_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2,
-)
-from google.cloud.firestore_v1.proto import (
- query_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2,
-)
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1/proto/tests.proto",
- package="google.cloud.firestore_v1.proto",
- syntax="proto3",
- serialized_pb=_b(
- '\n+google/cloud/firestore_v1/proto/tests.proto\x12\x1fgoogle.cloud.firestore_v1.proto\x1a,google/cloud/firestore_v1/proto/common.proto\x1a.google/cloud/firestore_v1/proto/document.proto\x1a/google/cloud/firestore_v1/proto/firestore.proto\x1a+google/cloud/firestore_v1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"@\n\x08TestFile\x12\x34\n\x05tests\x18\x01 \x03(\x0b\x32%.google.cloud.firestore_v1.proto.Test"\xa9\x04\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x0f\n\x07\x63omment\x18\n \x01(\t\x12\x37\n\x03get\x18\x02 \x01(\x0b\x32(.google.cloud.firestore_v1.proto.GetTestH\x00\x12=\n\x06\x63reate\x18\x03 \x01(\x0b\x32+.google.cloud.firestore_v1.proto.CreateTestH\x00\x12\x37\n\x03set\x18\x04 \x01(\x0b\x32(.google.cloud.firestore_v1.proto.SetTestH\x00\x12=\n\x06update\x18\x05 \x01(\x0b\x32+.google.cloud.firestore_v1.proto.UpdateTestH\x00\x12H\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x30.google.cloud.firestore_v1.proto.UpdatePathsTestH\x00\x12=\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32+.google.cloud.firestore_v1.proto.DeleteTestH\x00\x12;\n\x05query\x18\x08 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.QueryTestH\x00\x12=\n\x06listen\x18\t \x01(\x0b\x32+.google.cloud.firestore_v1.proto.ListenTestH\x00\x42\x06\n\x04test"Y\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x38\n\x07request\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.GetDocumentRequest"|\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xb5\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12:\n\x06option\x18\x02 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xb5\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 
\x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xfd\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12?\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x33\n\x07request\x18\x05 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xa2\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"T\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12:\n\x06\x66ields\x18\x02 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath"\x9f\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12\x38\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\'.google.cloud.firestore_v1.proto.Clause\x12\x33\n\x05query\x18\x03 \x01(\x0b\x32$.google.firestore.v1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xde\x03\n\x06\x43lause\x12\x39\n\x06select\x18\x01 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.SelectH\x00\x12\x37\n\x05where\x18\x02 \x01(\x0b\x32&.google.cloud.firestore_v1.proto.WhereH\x00\x12<\n\x08order_by\x18\x03 \x01(\x0b\x32(.google.cloud.firestore_v1.proto.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12;\n\x08start_at\x18\x06 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x12>\n\x0bstart_after\x18\x07 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x12\x39\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x12=\n\nend_before\x18\t 
\x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x42\x08\n\x06\x63lause"D\n\x06Select\x12:\n\x06\x66ields\x18\x01 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath"a\n\x05Where\x12\x38\n\x04path\x18\x01 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t"V\n\x07OrderBy\x12\x38\n\x04path\x18\x01 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"a\n\x06\x43ursor\x12\x42\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32,.google.cloud.firestore_v1.proto.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"\x94\x01\n\nListenTest\x12\x36\n\tresponses\x18\x01 \x03(\x0b\x32#.google.firestore.v1.ListenResponse\x12<\n\tsnapshots\x18\x02 \x03(\x0b\x32).google.cloud.firestore_v1.proto.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\xa3\x01\n\x08Snapshot\x12+\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\x1d.google.firestore.v1.Document\x12;\n\x07\x63hanges\x18\x02 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xe0\x01\n\tDocChange\x12=\n\x04kind\x18\x01 \x01(\x0e\x32/.google.cloud.firestore_v1.proto.DocChange.Kind\x12*\n\x03\x64oc\x18\x02 \x01(\x0b\x32\x1d.google.firestore.v1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42\x8b\x01\n)com.google.cloud.conformance.firestore.v1B\x0eTestDefinition\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3'
- ),
- dependencies=[
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- ],
-)
-
-
-_DOCCHANGE_KIND = _descriptor.EnumDescriptor(
- name="Kind",
- full_name="google.cloud.firestore_v1.proto.DocChange.Kind",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="KIND_UNSPECIFIED", index=0, number=0, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="ADDED", index=1, number=1, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="REMOVED", index=2, number=2, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="MODIFIED", index=3, number=3, options=None, type=None
- ),
- ],
- containing_type=None,
- options=None,
- serialized_start=3566,
- serialized_end=3632,
-)
-_sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND)
-
-
-_TESTFILE = _descriptor.Descriptor(
- name="TestFile",
- full_name="google.cloud.firestore_v1.proto.TestFile",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="tests",
- full_name="google.cloud.firestore_v1.proto.TestFile.tests",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=301,
- serialized_end=365,
-)
-
-
-_TEST = _descriptor.Descriptor(
- name="Test",
- full_name="google.cloud.firestore_v1.proto.Test",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="description",
- full_name="google.cloud.firestore_v1.proto.Test.description",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="comment",
- full_name="google.cloud.firestore_v1.proto.Test.comment",
- index=1,
- number=10,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="get",
- full_name="google.cloud.firestore_v1.proto.Test.get",
- index=2,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="create",
- full_name="google.cloud.firestore_v1.proto.Test.create",
- index=3,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="set",
- full_name="google.cloud.firestore_v1.proto.Test.set",
- index=4,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update",
- full_name="google.cloud.firestore_v1.proto.Test.update",
- index=5,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update_paths",
- full_name="google.cloud.firestore_v1.proto.Test.update_paths",
- index=6,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="delete",
- full_name="google.cloud.firestore_v1.proto.Test.delete",
- index=7,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="query",
- full_name="google.cloud.firestore_v1.proto.Test.query",
- index=8,
- number=8,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="listen",
- full_name="google.cloud.firestore_v1.proto.Test.listen",
- index=9,
- number=9,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="test",
- full_name="google.cloud.firestore_v1.proto.Test.test",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=368,
- serialized_end=921,
-)
-
-
-_GETTEST = _descriptor.Descriptor(
- name="GetTest",
- full_name="google.cloud.firestore_v1.proto.GetTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="google.cloud.firestore_v1.proto.GetTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="google.cloud.firestore_v1.proto.GetTest.request",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=923,
- serialized_end=1012,
-)
-
-
-_CREATETEST = _descriptor.Descriptor(
- name="CreateTest",
- full_name="google.cloud.firestore_v1.proto.CreateTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="google.cloud.firestore_v1.proto.CreateTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="google.cloud.firestore_v1.proto.CreateTest.json_data",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="google.cloud.firestore_v1.proto.CreateTest.request",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="google.cloud.firestore_v1.proto.CreateTest.is_error",
- index=3,
- number=4,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1014,
- serialized_end=1138,
-)
-
-
-_SETTEST = _descriptor.Descriptor(
- name="SetTest",
- full_name="google.cloud.firestore_v1.proto.SetTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="google.cloud.firestore_v1.proto.SetTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="option",
- full_name="google.cloud.firestore_v1.proto.SetTest.option",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="google.cloud.firestore_v1.proto.SetTest.json_data",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="google.cloud.firestore_v1.proto.SetTest.request",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="google.cloud.firestore_v1.proto.SetTest.is_error",
- index=4,
- number=5,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1141,
- serialized_end=1322,
-)
-
-
-_UPDATETEST = _descriptor.Descriptor(
- name="UpdateTest",
- full_name="google.cloud.firestore_v1.proto.UpdateTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="google.cloud.firestore_v1.proto.UpdateTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="precondition",
- full_name="google.cloud.firestore_v1.proto.UpdateTest.precondition",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="google.cloud.firestore_v1.proto.UpdateTest.json_data",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="google.cloud.firestore_v1.proto.UpdateTest.request",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="google.cloud.firestore_v1.proto.UpdateTest.is_error",
- index=4,
- number=5,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1325,
- serialized_end=1506,
-)
-
-
-_UPDATEPATHSTEST = _descriptor.Descriptor(
- name="UpdatePathsTest",
- full_name="google.cloud.firestore_v1.proto.UpdatePathsTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="precondition",
- full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.precondition",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="field_paths",
- full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.field_paths",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_values",
- full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.json_values",
- index=3,
- number=4,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.request",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.is_error",
- index=5,
- number=6,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1509,
- serialized_end=1762,
-)
-
-
-_DELETETEST = _descriptor.Descriptor(
- name="DeleteTest",
- full_name="google.cloud.firestore_v1.proto.DeleteTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="google.cloud.firestore_v1.proto.DeleteTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="precondition",
- full_name="google.cloud.firestore_v1.proto.DeleteTest.precondition",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="google.cloud.firestore_v1.proto.DeleteTest.request",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="google.cloud.firestore_v1.proto.DeleteTest.is_error",
- index=3,
- number=4,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1765,
- serialized_end=1927,
-)
-
-
-_SETOPTION = _descriptor.Descriptor(
- name="SetOption",
- full_name="google.cloud.firestore_v1.proto.SetOption",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="all",
- full_name="google.cloud.firestore_v1.proto.SetOption.all",
- index=0,
- number=1,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="google.cloud.firestore_v1.proto.SetOption.fields",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1929,
- serialized_end=2013,
-)
-
-
-_QUERYTEST = _descriptor.Descriptor(
- name="QueryTest",
- full_name="google.cloud.firestore_v1.proto.QueryTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="coll_path",
- full_name="google.cloud.firestore_v1.proto.QueryTest.coll_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="clauses",
- full_name="google.cloud.firestore_v1.proto.QueryTest.clauses",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="query",
- full_name="google.cloud.firestore_v1.proto.QueryTest.query",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="google.cloud.firestore_v1.proto.QueryTest.is_error",
- index=3,
- number=4,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2016,
- serialized_end=2175,
-)
-
-
-_CLAUSE = _descriptor.Descriptor(
- name="Clause",
- full_name="google.cloud.firestore_v1.proto.Clause",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="select",
- full_name="google.cloud.firestore_v1.proto.Clause.select",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="where",
- full_name="google.cloud.firestore_v1.proto.Clause.where",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="order_by",
- full_name="google.cloud.firestore_v1.proto.Clause.order_by",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="offset",
- full_name="google.cloud.firestore_v1.proto.Clause.offset",
- index=3,
- number=4,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="limit",
- full_name="google.cloud.firestore_v1.proto.Clause.limit",
- index=4,
- number=5,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="start_at",
- full_name="google.cloud.firestore_v1.proto.Clause.start_at",
- index=5,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="start_after",
- full_name="google.cloud.firestore_v1.proto.Clause.start_after",
- index=6,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_at",
- full_name="google.cloud.firestore_v1.proto.Clause.end_at",
- index=7,
- number=8,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_before",
- full_name="google.cloud.firestore_v1.proto.Clause.end_before",
- index=8,
- number=9,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="clause",
- full_name="google.cloud.firestore_v1.proto.Clause.clause",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=2178,
- serialized_end=2656,
-)
-
-
-_SELECT = _descriptor.Descriptor(
- name="Select",
- full_name="google.cloud.firestore_v1.proto.Select",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="google.cloud.firestore_v1.proto.Select.fields",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2658,
- serialized_end=2726,
-)
-
-
-_WHERE = _descriptor.Descriptor(
- name="Where",
- full_name="google.cloud.firestore_v1.proto.Where",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="path",
- full_name="google.cloud.firestore_v1.proto.Where.path",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="op",
- full_name="google.cloud.firestore_v1.proto.Where.op",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_value",
- full_name="google.cloud.firestore_v1.proto.Where.json_value",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2728,
- serialized_end=2825,
-)
-
-
-_ORDERBY = _descriptor.Descriptor(
- name="OrderBy",
- full_name="google.cloud.firestore_v1.proto.OrderBy",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="path",
- full_name="google.cloud.firestore_v1.proto.OrderBy.path",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="direction",
- full_name="google.cloud.firestore_v1.proto.OrderBy.direction",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2827,
- serialized_end=2913,
-)
-
-
-_CURSOR = _descriptor.Descriptor(
- name="Cursor",
- full_name="google.cloud.firestore_v1.proto.Cursor",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_snapshot",
- full_name="google.cloud.firestore_v1.proto.Cursor.doc_snapshot",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_values",
- full_name="google.cloud.firestore_v1.proto.Cursor.json_values",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2915,
- serialized_end=3012,
-)
-
-
-_DOCSNAPSHOT = _descriptor.Descriptor(
- name="DocSnapshot",
- full_name="google.cloud.firestore_v1.proto.DocSnapshot",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="path",
- full_name="google.cloud.firestore_v1.proto.DocSnapshot.path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="google.cloud.firestore_v1.proto.DocSnapshot.json_data",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=3014,
- serialized_end=3060,
-)
-
-
-_FIELDPATH = _descriptor.Descriptor(
- name="FieldPath",
- full_name="google.cloud.firestore_v1.proto.FieldPath",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field",
- full_name="google.cloud.firestore_v1.proto.FieldPath.field",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=3062,
- serialized_end=3088,
-)
-
-
-_LISTENTEST = _descriptor.Descriptor(
- name="ListenTest",
- full_name="google.cloud.firestore_v1.proto.ListenTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="responses",
- full_name="google.cloud.firestore_v1.proto.ListenTest.responses",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="snapshots",
- full_name="google.cloud.firestore_v1.proto.ListenTest.snapshots",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="google.cloud.firestore_v1.proto.ListenTest.is_error",
- index=2,
- number=3,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=3091,
- serialized_end=3239,
-)
-
-
-_SNAPSHOT = _descriptor.Descriptor(
- name="Snapshot",
- full_name="google.cloud.firestore_v1.proto.Snapshot",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="docs",
- full_name="google.cloud.firestore_v1.proto.Snapshot.docs",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="changes",
- full_name="google.cloud.firestore_v1.proto.Snapshot.changes",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.cloud.firestore_v1.proto.Snapshot.read_time",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=3242,
- serialized_end=3405,
-)
-
-
-_DOCCHANGE = _descriptor.Descriptor(
- name="DocChange",
- full_name="google.cloud.firestore_v1.proto.DocChange",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="kind",
- full_name="google.cloud.firestore_v1.proto.DocChange.kind",
- index=0,
- number=1,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="doc",
- full_name="google.cloud.firestore_v1.proto.DocChange.doc",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="old_index",
- full_name="google.cloud.firestore_v1.proto.DocChange.old_index",
- index=2,
- number=3,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="new_index",
- full_name="google.cloud.firestore_v1.proto.DocChange.new_index",
- index=3,
- number=4,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_DOCCHANGE_KIND],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=3408,
- serialized_end=3632,
-)
-
-_TESTFILE.fields_by_name["tests"].message_type = _TEST
-_TEST.fields_by_name["get"].message_type = _GETTEST
-_TEST.fields_by_name["create"].message_type = _CREATETEST
-_TEST.fields_by_name["set"].message_type = _SETTEST
-_TEST.fields_by_name["update"].message_type = _UPDATETEST
-_TEST.fields_by_name["update_paths"].message_type = _UPDATEPATHSTEST
-_TEST.fields_by_name["delete"].message_type = _DELETETEST
-_TEST.fields_by_name["query"].message_type = _QUERYTEST
-_TEST.fields_by_name["listen"].message_type = _LISTENTEST
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["get"])
-_TEST.fields_by_name["get"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["create"])
-_TEST.fields_by_name["create"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["set"])
-_TEST.fields_by_name["set"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update"])
-_TEST.fields_by_name["update"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update_paths"])
-_TEST.fields_by_name["update_paths"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["delete"])
-_TEST.fields_by_name["delete"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["query"])
-_TEST.fields_by_name["query"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["listen"])
-_TEST.fields_by_name["listen"].containing_oneof = _TEST.oneofs_by_name["test"]
-_GETTEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST
-)
-_CREATETEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_SETTEST.fields_by_name["option"].message_type = _SETOPTION
-_SETTEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_UPDATETEST.fields_by_name[
- "precondition"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_UPDATETEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_UPDATEPATHSTEST.fields_by_name[
- "precondition"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_UPDATEPATHSTEST.fields_by_name["field_paths"].message_type = _FIELDPATH
-_UPDATEPATHSTEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_DELETETEST.fields_by_name[
- "precondition"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_DELETETEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_SETOPTION.fields_by_name["fields"].message_type = _FIELDPATH
-_QUERYTEST.fields_by_name["clauses"].message_type = _CLAUSE
-_QUERYTEST.fields_by_name[
- "query"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
-)
-_CLAUSE.fields_by_name["select"].message_type = _SELECT
-_CLAUSE.fields_by_name["where"].message_type = _WHERE
-_CLAUSE.fields_by_name["order_by"].message_type = _ORDERBY
-_CLAUSE.fields_by_name["start_at"].message_type = _CURSOR
-_CLAUSE.fields_by_name["start_after"].message_type = _CURSOR
-_CLAUSE.fields_by_name["end_at"].message_type = _CURSOR
-_CLAUSE.fields_by_name["end_before"].message_type = _CURSOR
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["select"])
-_CLAUSE.fields_by_name["select"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["where"])
-_CLAUSE.fields_by_name["where"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["order_by"])
-_CLAUSE.fields_by_name["order_by"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["offset"])
-_CLAUSE.fields_by_name["offset"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["limit"])
-_CLAUSE.fields_by_name["limit"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_at"])
-_CLAUSE.fields_by_name["start_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_after"])
-_CLAUSE.fields_by_name["start_after"].containing_oneof = _CLAUSE.oneofs_by_name[
- "clause"
-]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_at"])
-_CLAUSE.fields_by_name["end_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_before"])
-_CLAUSE.fields_by_name["end_before"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_SELECT.fields_by_name["fields"].message_type = _FIELDPATH
-_WHERE.fields_by_name["path"].message_type = _FIELDPATH
-_ORDERBY.fields_by_name["path"].message_type = _FIELDPATH
-_CURSOR.fields_by_name["doc_snapshot"].message_type = _DOCSNAPSHOT
-_LISTENTEST.fields_by_name[
- "responses"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._LISTENRESPONSE
-)
-_LISTENTEST.fields_by_name["snapshots"].message_type = _SNAPSHOT
-_SNAPSHOT.fields_by_name[
- "docs"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_SNAPSHOT.fields_by_name["changes"].message_type = _DOCCHANGE
-_SNAPSHOT.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_DOCCHANGE.fields_by_name["kind"].enum_type = _DOCCHANGE_KIND
-_DOCCHANGE.fields_by_name[
- "doc"
-].message_type = (
- google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_DOCCHANGE_KIND.containing_type = _DOCCHANGE
-DESCRIPTOR.message_types_by_name["TestFile"] = _TESTFILE
-DESCRIPTOR.message_types_by_name["Test"] = _TEST
-DESCRIPTOR.message_types_by_name["GetTest"] = _GETTEST
-DESCRIPTOR.message_types_by_name["CreateTest"] = _CREATETEST
-DESCRIPTOR.message_types_by_name["SetTest"] = _SETTEST
-DESCRIPTOR.message_types_by_name["UpdateTest"] = _UPDATETEST
-DESCRIPTOR.message_types_by_name["UpdatePathsTest"] = _UPDATEPATHSTEST
-DESCRIPTOR.message_types_by_name["DeleteTest"] = _DELETETEST
-DESCRIPTOR.message_types_by_name["SetOption"] = _SETOPTION
-DESCRIPTOR.message_types_by_name["QueryTest"] = _QUERYTEST
-DESCRIPTOR.message_types_by_name["Clause"] = _CLAUSE
-DESCRIPTOR.message_types_by_name["Select"] = _SELECT
-DESCRIPTOR.message_types_by_name["Where"] = _WHERE
-DESCRIPTOR.message_types_by_name["OrderBy"] = _ORDERBY
-DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR
-DESCRIPTOR.message_types_by_name["DocSnapshot"] = _DOCSNAPSHOT
-DESCRIPTOR.message_types_by_name["FieldPath"] = _FIELDPATH
-DESCRIPTOR.message_types_by_name["ListenTest"] = _LISTENTEST
-DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT
-DESCRIPTOR.message_types_by_name["DocChange"] = _DOCCHANGE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-TestFile = _reflection.GeneratedProtocolMessageType(
- "TestFile",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TESTFILE,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.TestFile)
- ),
-)
-_sym_db.RegisterMessage(TestFile)
-
-Test = _reflection.GeneratedProtocolMessageType(
- "Test",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TEST,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Test)
- ),
-)
-_sym_db.RegisterMessage(Test)
-
-GetTest = _reflection.GeneratedProtocolMessageType(
- "GetTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GETTEST,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.GetTest)
- ),
-)
-_sym_db.RegisterMessage(GetTest)
-
-CreateTest = _reflection.GeneratedProtocolMessageType(
- "CreateTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CREATETEST,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.CreateTest)
- ),
-)
-_sym_db.RegisterMessage(CreateTest)
-
-SetTest = _reflection.GeneratedProtocolMessageType(
- "SetTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SETTEST,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.SetTest)
- ),
-)
-_sym_db.RegisterMessage(SetTest)
-
-UpdateTest = _reflection.GeneratedProtocolMessageType(
- "UpdateTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPDATETEST,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.UpdateTest)
- ),
-)
-_sym_db.RegisterMessage(UpdateTest)
-
-UpdatePathsTest = _reflection.GeneratedProtocolMessageType(
- "UpdatePathsTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPDATEPATHSTEST,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.UpdatePathsTest)
- ),
-)
-_sym_db.RegisterMessage(UpdatePathsTest)
-
-DeleteTest = _reflection.GeneratedProtocolMessageType(
- "DeleteTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DELETETEST,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.DeleteTest)
- ),
-)
-_sym_db.RegisterMessage(DeleteTest)
-
-SetOption = _reflection.GeneratedProtocolMessageType(
- "SetOption",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SETOPTION,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.SetOption)
- ),
-)
-_sym_db.RegisterMessage(SetOption)
-
-QueryTest = _reflection.GeneratedProtocolMessageType(
- "QueryTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_QUERYTEST,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.QueryTest)
- ),
-)
-_sym_db.RegisterMessage(QueryTest)
-
-Clause = _reflection.GeneratedProtocolMessageType(
- "Clause",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CLAUSE,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Clause)
- ),
-)
-_sym_db.RegisterMessage(Clause)
-
-Select = _reflection.GeneratedProtocolMessageType(
- "Select",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SELECT,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Select)
- ),
-)
-_sym_db.RegisterMessage(Select)
-
-Where = _reflection.GeneratedProtocolMessageType(
- "Where",
- (_message.Message,),
- dict(
- DESCRIPTOR=_WHERE,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Where)
- ),
-)
-_sym_db.RegisterMessage(Where)
-
-OrderBy = _reflection.GeneratedProtocolMessageType(
- "OrderBy",
- (_message.Message,),
- dict(
- DESCRIPTOR=_ORDERBY,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.OrderBy)
- ),
-)
-_sym_db.RegisterMessage(OrderBy)
-
-Cursor = _reflection.GeneratedProtocolMessageType(
- "Cursor",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CURSOR,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Cursor)
- ),
-)
-_sym_db.RegisterMessage(Cursor)
-
-DocSnapshot = _reflection.GeneratedProtocolMessageType(
- "DocSnapshot",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCSNAPSHOT,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.DocSnapshot)
- ),
-)
-_sym_db.RegisterMessage(DocSnapshot)
-
-FieldPath = _reflection.GeneratedProtocolMessageType(
- "FieldPath",
- (_message.Message,),
- dict(
- DESCRIPTOR=_FIELDPATH,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.FieldPath)
- ),
-)
-_sym_db.RegisterMessage(FieldPath)
-
-ListenTest = _reflection.GeneratedProtocolMessageType(
- "ListenTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTENTEST,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.ListenTest)
- ),
-)
-_sym_db.RegisterMessage(ListenTest)
-
-Snapshot = _reflection.GeneratedProtocolMessageType(
- "Snapshot",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SNAPSHOT,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Snapshot)
- ),
-)
-_sym_db.RegisterMessage(Snapshot)
-
-DocChange = _reflection.GeneratedProtocolMessageType(
- "DocChange",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCCHANGE,
- __module__="google.cloud.firestore_v1.proto.tests_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.DocChange)
- ),
-)
-_sym_db.RegisterMessage(DocChange)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(
- descriptor_pb2.FileOptions(),
- _b(
- '\n)com.google.cloud.conformance.firestore.v1B\016TestDefinition\252\002"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance'
- ),
-)
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1/proto/write.proto b/google/cloud/firestore_v1/proto/write.proto
deleted file mode 100644
index 51d9239180..0000000000
--- a/google/cloud/firestore_v1/proto/write.proto
+++ /dev/null
@@ -1,254 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.v1;
-
-import "google/firestore/v1/common.proto";
-import "google/firestore/v1/document.proto";
-import "google/protobuf/timestamp.proto";
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.V1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/v1;firestore";
-option java_multiple_files = true;
-option java_outer_classname = "WriteProto";
-option java_package = "com.google.firestore.v1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\V1";
-
-// A write on a document.
-message Write {
- // The operation to execute.
- oneof operation {
- // A document to write.
- Document update = 1;
-
- // A document name to delete. In the format:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- string delete = 2;
-
- // Applies a transformation to a document.
- // At most one `transform` per document is allowed in a given request.
- // An `update` cannot follow a `transform` on the same document in a given
- // request.
- DocumentTransform transform = 6;
- }
-
- // The fields to update in this write.
- //
- // This field can be set only when the operation is `update`.
- // If the mask is not set for an `update` and the document exists, any
- // existing data will be overwritten.
- // If the mask is set and the document on the server has fields not covered by
- // the mask, they are left unchanged.
- // Fields referenced in the mask, but not present in the input document, are
- // deleted from the document on the server.
- // The field paths in this mask must not contain a reserved field name.
- DocumentMask update_mask = 3;
-
- // An optional precondition on the document.
- //
- // The write will fail if this is set and not met by the target document.
- Precondition current_document = 4;
-}
-
-// A transformation of a document.
-message DocumentTransform {
- // A transformation of a field of the document.
- message FieldTransform {
- // A value that is calculated by the server.
- enum ServerValue {
- // Unspecified. This value must not be used.
- SERVER_VALUE_UNSPECIFIED = 0;
-
- // The time at which the server processed the request, with millisecond
- // precision.
- REQUEST_TIME = 1;
- }
-
- // The path of the field. See [Document.fields][google.firestore.v1.Document.fields] for the field path syntax
- // reference.
- string field_path = 1;
-
- // The transformation to apply on the field.
- oneof transform_type {
- // Sets the field to the given server value.
- ServerValue set_to_server_value = 2;
-
- // Adds the given value to the field's current value.
- //
- // This must be an integer or a double value.
- // If the field is not an integer or double, or if the field does not yet
- // exist, the transformation will set the field to the given value.
- // If either of the given value or the current field value are doubles,
- // both values will be interpreted as doubles. Double arithmetic and
- // representation of double values follow IEEE 754 semantics.
- // If there is positive/negative integer overflow, the field is resolved
- // to the largest magnitude positive/negative integer.
- Value increment = 3;
-
- // Sets the field to the maximum of its current value and the given value.
- //
- // This must be an integer or a double value.
- // If the field is not an integer or double, or if the field does not yet
- // exist, the transformation will set the field to the given value.
- // If a maximum operation is applied where the field and the input value
- // are of mixed types (that is - one is an integer and one is a double)
- // the field takes on the type of the larger operand. If the operands are
- // equivalent (e.g. 3 and 3.0), the field does not change.
- // 0, 0.0, and -0.0 are all zero. The maximum of a zero stored value and
- // zero input value is always the stored value.
- // The maximum of any numeric value x and NaN is NaN.
- Value maximum = 4;
-
- // Sets the field to the minimum of its current value and the given value.
- //
- // This must be an integer or a double value.
- // If the field is not an integer or double, or if the field does not yet
- // exist, the transformation will set the field to the input value.
- // If a minimum operation is applied where the field and the input value
- // are of mixed types (that is - one is an integer and one is a double)
- // the field takes on the type of the smaller operand. If the operands are
- // equivalent (e.g. 3 and 3.0), the field does not change.
- // 0, 0.0, and -0.0 are all zero. The minimum of a zero stored value and
- // zero input value is always the stored value.
- // The minimum of any numeric value x and NaN is NaN.
- Value minimum = 5;
-
- // Append the given elements in order if they are not already present in
- // the current field value.
- // If the field is not an array, or if the field does not yet exist, it is
- // first set to the empty array.
- //
- // Equivalent numbers of different types (e.g. 3L and 3.0) are
- // considered equal when checking if a value is missing.
- // NaN is equal to NaN, and Null is equal to Null.
- // If the input contains multiple equivalent values, only the first will
- // be considered.
- //
- // The corresponding transform_result will be the null value.
- ArrayValue append_missing_elements = 6;
-
- // Remove all of the given elements from the array in the field.
- // If the field is not an array, or if the field does not yet exist, it is
- // set to the empty array.
- //
- // Equivalent numbers of the different types (e.g. 3L and 3.0) are
- // considered equal when deciding whether an element should be removed.
- // NaN is equal to NaN, and Null is equal to Null.
- // This will remove all equivalent values if there are duplicates.
- //
- // The corresponding transform_result will be the null value.
- ArrayValue remove_all_from_array = 7;
- }
- }
-
- // The name of the document to transform.
- string document = 1;
-
- // The list of transformations to apply to the fields of the document, in
- // order.
- // This must not be empty.
- repeated FieldTransform field_transforms = 2;
-}
-
-// The result of applying a write.
-message WriteResult {
- // The last update time of the document after applying the write. Not set
- // after a `delete`.
- //
- // If the write did not actually change the document, this will be the
- // previous update_time.
- google.protobuf.Timestamp update_time = 1;
-
- // The results of applying each [DocumentTransform.FieldTransform][google.firestore.v1.DocumentTransform.FieldTransform], in the
- // same order.
- repeated Value transform_results = 2;
-}
-
-// A [Document][google.firestore.v1.Document] has changed.
-//
-// May be the result of multiple [writes][google.firestore.v1.Write], including deletes, that
-// ultimately resulted in a new value for the [Document][google.firestore.v1.Document].
-//
-// Multiple [DocumentChange][google.firestore.v1.DocumentChange] messages may be returned for the same logical
-// change, if multiple targets are affected.
-message DocumentChange {
- // The new state of the [Document][google.firestore.v1.Document].
- //
- // If `mask` is set, contains only fields that were updated or added.
- Document document = 1;
-
- // A set of target IDs of targets that match this document.
- repeated int32 target_ids = 5;
-
- // A set of target IDs for targets that no longer match this document.
- repeated int32 removed_target_ids = 6;
-}
-
-// A [Document][google.firestore.v1.Document] has been deleted.
-//
-// May be the result of multiple [writes][google.firestore.v1.Write], including updates, the
-// last of which deleted the [Document][google.firestore.v1.Document].
-//
-// Multiple [DocumentDelete][google.firestore.v1.DocumentDelete] messages may be returned for the same logical
-// delete, if multiple targets are affected.
-message DocumentDelete {
- // The resource name of the [Document][google.firestore.v1.Document] that was deleted.
- string document = 1;
-
- // A set of target IDs for targets that previously matched this entity.
- repeated int32 removed_target_ids = 6;
-
- // The read timestamp at which the delete was observed.
- //
- // Greater or equal to the `commit_time` of the delete.
- google.protobuf.Timestamp read_time = 4;
-}
-
-// A [Document][google.firestore.v1.Document] has been removed from the view of the targets.
-//
-// Sent if the document is no longer relevant to a target and is out of view.
-// Can be sent instead of a DocumentDelete or a DocumentChange if the server
-// can not send the new value of the document.
-//
-// Multiple [DocumentRemove][google.firestore.v1.DocumentRemove] messages may be returned for the same logical
-// write or delete, if multiple targets are affected.
-message DocumentRemove {
- // The resource name of the [Document][google.firestore.v1.Document] that has gone out of view.
- string document = 1;
-
- // A set of target IDs for targets that previously matched this document.
- repeated int32 removed_target_ids = 2;
-
- // The read timestamp at which the remove was observed.
- //
- // Greater or equal to the `commit_time` of the change/delete/remove.
- google.protobuf.Timestamp read_time = 4;
-}
-
-// A digest of all the documents that match a given target.
-message ExistenceFilter {
- // The target ID to which this filter applies.
- int32 target_id = 1;
-
- // The total count of documents that match [target_id][google.firestore.v1.ExistenceFilter.target_id].
- //
- // If different from the count of documents in the client that match, the
- // client must manually determine which documents no longer match the target.
- int32 count = 2;
-}
diff --git a/google/cloud/firestore_v1/proto/write_pb2.py b/google/cloud/firestore_v1/proto/write_pb2.py
deleted file mode 100644
index 1ed1c44246..0000000000
--- a/google/cloud/firestore_v1/proto/write_pb2.py
+++ /dev/null
@@ -1,1146 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1/proto/write.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.cloud.firestore_v1.proto import (
- common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2,
-)
-from google.cloud.firestore_v1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
-)
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1/proto/write.proto",
- package="google.firestore.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\027com.google.firestore.v1B\nWriteProtoP\001Z=": _operator_enum.GREATER_THAN_OR_EQUAL,
- ">": _operator_enum.GREATER_THAN,
- "array_contains": _operator_enum.ARRAY_CONTAINS,
- "in": _operator_enum.IN,
- "array_contains_any": _operator_enum.ARRAY_CONTAINS_ANY,
-}
-_BAD_OP_STRING = "Operator string {!r} is invalid. Valid choices are: {}."
-_BAD_OP_NAN_NULL = 'Only an equality filter ("==") can be used with None or NaN values'
-_INVALID_WHERE_TRANSFORM = "Transforms cannot be used as where values."
-_BAD_DIR_STRING = "Invalid direction {!r}. Must be one of {!r} or {!r}."
-_INVALID_CURSOR_TRANSFORM = "Transforms cannot be used as cursor values."
-_MISSING_ORDER_BY = (
- 'The "order by" field path {!r} is not present in the cursor data {!r}. '
- "All fields sent to ``order_by()`` must be present in the fields "
- "if passed to one of ``start_at()`` / ``start_after()`` / "
- "``end_before()`` / ``end_at()`` to define a cursor."
-)
-_NO_ORDERS_FOR_CURSOR = (
- "Attempting to create a cursor with no fields to order on. "
- "When defining a cursor with one of ``start_at()`` / ``start_after()`` / "
- "``end_before()`` / ``end_at()``, all fields in the cursor must "
- "come from fields set in ``order_by()``."
-)
-_MISMATCH_CURSOR_W_ORDER_BY = "The cursor {!r} does not match the order fields {!r}."
-
-class Query(object):
+class Query(BaseQuery):
"""Represents a query to the Firestore API.
Instances of this class are considered immutable: all methods that
@@ -76,13 +48,13 @@ class Query(object):
parent (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
The collection that this query applies to.
projection (Optional[:class:`google.cloud.proto.firestore.v1.\
- query_pb2.StructuredQuery.Projection`]):
+ query.StructuredQuery.Projection`]):
A projection of document fields to limit the query results to.
field_filters (Optional[Tuple[:class:`google.cloud.proto.firestore.v1.\
- query_pb2.StructuredQuery.FieldFilter`, ...]]):
+ query.StructuredQuery.FieldFilter`, ...]]):
The filters to be applied in the query.
orders (Optional[Tuple[:class:`google.cloud.proto.firestore.v1.\
- query_pb2.StructuredQuery.Order`, ...]]):
+ query.StructuredQuery.Order`, ...]]):
The "order by" entries to use in the query.
limit (Optional[int]):
The maximum number of documents the query is allowed to return.
@@ -122,11 +94,6 @@ class Query(object):
When true, selects all descendant collections.
"""
- ASCENDING = "ASCENDING"
- """str: Sort query results in ascending order on a field."""
- DESCENDING = "DESCENDING"
- """str: Sort query results in descending order on a field."""
-
def __init__(
self,
parent,
@@ -134,610 +101,77 @@ def __init__(
field_filters=(),
orders=(),
limit=None,
+ limit_to_last=False,
offset=None,
start_at=None,
end_at=None,
all_descendants=False,
- ):
- self._parent = parent
- self._projection = projection
- self._field_filters = field_filters
- self._orders = orders
- self._limit = limit
- self._offset = offset
- self._start_at = start_at
- self._end_at = end_at
- self._all_descendants = all_descendants
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return NotImplemented
- return (
- self._parent == other._parent
- and self._projection == other._projection
- and self._field_filters == other._field_filters
- and self._orders == other._orders
- and self._limit == other._limit
- and self._offset == other._offset
- and self._start_at == other._start_at
- and self._end_at == other._end_at
- and self._all_descendants == other._all_descendants
- )
-
- @property
- def _client(self):
- """The client of the parent collection.
-
- Returns:
- :class:`~google.cloud.firestore_v1.client.Client`:
- The client that owns this query.
- """
- return self._parent._client
-
- def select(self, field_paths):
- """Project documents matching query to a limited set of fields.
-
- See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
- more information on **field paths**.
-
- If the current query already has a projection set (i.e. has already
- called :meth:`~google.cloud.firestore_v1.query.Query.select`), this
- will overwrite it.
-
- Args:
- field_paths (Iterable[str, ...]): An iterable of field paths
- (``.``-delimited list of field names) to use as a projection
- of document fields in the query results.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A "projected" query. Acts as a copy of the current query,
- modified with the newly added projection.
- Raises:
- ValueError: If any ``field_path`` is invalid.
- """
- field_paths = list(field_paths)
- for field_path in field_paths:
- field_path_module.split_field_path(field_path) # raises
-
- new_projection = query_pb2.StructuredQuery.Projection(
- fields=[
- query_pb2.StructuredQuery.FieldReference(field_path=field_path)
- for field_path in field_paths
- ]
+ ) -> None:
+ super(Query, self).__init__(
+ parent=parent,
+ projection=projection,
+ field_filters=field_filters,
+ orders=orders,
+ limit=limit,
+ limit_to_last=limit_to_last,
+ offset=offset,
+ start_at=start_at,
+ end_at=end_at,
+ all_descendants=all_descendants,
)
- return self.__class__(
- self._parent,
- projection=new_projection,
- field_filters=self._field_filters,
- orders=self._orders,
- limit=self._limit,
- offset=self._offset,
- start_at=self._start_at,
- end_at=self._end_at,
- all_descendants=self._all_descendants,
- )
-
- def where(self, field_path, op_string, value):
- """Filter the query on a field.
-
- See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
- more information on **field paths**.
-
- Returns a new :class:`~google.cloud.firestore_v1.query.Query` that
- filters on a specific field path, according to an operation (e.g.
- ``==`` or "equals") and a particular value to be paired with that
- operation.
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names) for the field to filter on.
- op_string (str): A comparison operation in the form of a string.
- Acceptable values are ``<``, ``<=``, ``==``, ``>=``, ``>``,
- ``in``, ``array_contains`` and ``array_contains_any``.
- value (Any): The value to compare the field against in the filter.
- If ``value`` is :data:`None` or a NaN, then ``==`` is the only
- allowed operation.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A filtered query. Acts as a copy of the current query,
- modified with the newly added filter.
-
- Raises:
- ValueError: If ``field_path`` is invalid.
- ValueError: If ``value`` is a NaN or :data:`None` and
- ``op_string`` is not ``==``.
- """
- field_path_module.split_field_path(field_path) # raises
-
- if value is None:
- if op_string != _EQ_OP:
- raise ValueError(_BAD_OP_NAN_NULL)
- filter_pb = query_pb2.StructuredQuery.UnaryFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- op=enums.StructuredQuery.UnaryFilter.Operator.IS_NULL,
- )
- elif _isnan(value):
- if op_string != _EQ_OP:
- raise ValueError(_BAD_OP_NAN_NULL)
- filter_pb = query_pb2.StructuredQuery.UnaryFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- op=enums.StructuredQuery.UnaryFilter.Operator.IS_NAN,
- )
- elif isinstance(value, (transforms.Sentinel, transforms._ValueList)):
- raise ValueError(_INVALID_WHERE_TRANSFORM)
- else:
- filter_pb = query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- op=_enum_from_op_string(op_string),
- value=_helpers.encode_value(value),
- )
-
- new_filters = self._field_filters + (filter_pb,)
- return self.__class__(
- self._parent,
- projection=self._projection,
- field_filters=new_filters,
- orders=self._orders,
- limit=self._limit,
- offset=self._offset,
- start_at=self._start_at,
- end_at=self._end_at,
- all_descendants=self._all_descendants,
- )
-
- @staticmethod
- def _make_order(field_path, direction):
- """Helper for :meth:`order_by`."""
- return query_pb2.StructuredQuery.Order(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- direction=_enum_from_direction(direction),
- )
-
- def order_by(self, field_path, direction=ASCENDING):
- """Modify the query to add an order clause on a specific field.
-
- See :meth:`~google.cloud.firestore_v1.client.Client.field_path` for
- more information on **field paths**.
-
- Successive :meth:`~google.cloud.firestore_v1.query.Query.order_by`
- calls will further refine the ordering of results returned by the query
- (i.e. the new "order by" fields will be added to existing ones).
-
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names) on which to order the query results.
- direction (Optional[str]): The direction to order by. Must be one
- of :attr:`ASCENDING` or :attr:`DESCENDING`, defaults to
- :attr:`ASCENDING`.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- An ordered query. Acts as a copy of the current query, modified
- with the newly added "order by" constraint.
-
- Raises:
- ValueError: If ``field_path`` is invalid.
- ValueError: If ``direction`` is not one of :attr:`ASCENDING` or
- :attr:`DESCENDING`.
- """
- field_path_module.split_field_path(field_path) # raises
-
- order_pb = self._make_order(field_path, direction)
-
- new_orders = self._orders + (order_pb,)
- return self.__class__(
- self._parent,
- projection=self._projection,
- field_filters=self._field_filters,
- orders=new_orders,
- limit=self._limit,
- offset=self._offset,
- start_at=self._start_at,
- end_at=self._end_at,
- all_descendants=self._all_descendants,
- )
-
- def limit(self, count):
- """Limit a query to return a fixed number of results.
-
- If the current query already has a limit set, this will overwrite it.
-
- Args:
- count (int): Maximum number of documents to return that match
- the query.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A limited query. Acts as a copy of the current query, modified
- with the newly added "limit" filter.
- """
- return self.__class__(
- self._parent,
- projection=self._projection,
- field_filters=self._field_filters,
- orders=self._orders,
- limit=count,
- offset=self._offset,
- start_at=self._start_at,
- end_at=self._end_at,
- all_descendants=self._all_descendants,
- )
-
- def offset(self, num_to_skip):
- """Skip to an offset in a query.
-
- If the current query already has specified an offset, this will
- overwrite it.
-
- Args:
- num_to_skip (int): The number of results to skip at the beginning
- of query results. (Must be non-negative.)
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- An offset query. Acts as a copy of the current query, modified
- with the newly added "offset" field.
- """
- return self.__class__(
- self._parent,
- projection=self._projection,
- field_filters=self._field_filters,
- orders=self._orders,
- limit=self._limit,
- offset=num_to_skip,
- start_at=self._start_at,
- end_at=self._end_at,
- all_descendants=self._all_descendants,
- )
-
- def _check_snapshot(self, document_fields):
- """Validate local snapshots for non-collection-group queries.
-
- Raises:
- ValueError: for non-collection-group queries, if the snapshot
- is from a different collection.
- """
- if self._all_descendants:
- return
-
- if document_fields.reference._path[:-1] != self._parent._path:
- raise ValueError("Cannot use snapshot from another collection as a cursor.")
-
- def _cursor_helper(self, document_fields, before, start):
- """Set values to be used for a ``start_at`` or ``end_at`` cursor.
-
- The values will later be used in a query protobuf.
-
- When the query is sent to the server, the ``document_fields`` will
- be used in the order given by fields set by
- :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
-
- Args:
- document_fields
- (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
- a document snapshot or a dictionary/list/tuple of fields
- representing a query results cursor. A cursor is a collection
- of values that represent a position in a query result set.
- before (bool): Flag indicating if the document in
- ``document_fields`` should (:data:`False`) or
- shouldn't (:data:`True`) be included in the result set.
- start (Optional[bool]): determines if the cursor is a ``start_at``
- cursor (:data:`True`) or an ``end_at`` cursor (:data:`False`).
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A query with cursor. Acts as a copy of the current query, modified
- with the newly added "start at" cursor.
- """
- if isinstance(document_fields, tuple):
- document_fields = list(document_fields)
- elif isinstance(document_fields, document.DocumentSnapshot):
- self._check_snapshot(document_fields)
- else:
- # NOTE: We copy so that the caller can't modify after calling.
- document_fields = copy.deepcopy(document_fields)
-
- cursor_pair = document_fields, before
- query_kwargs = {
- "projection": self._projection,
- "field_filters": self._field_filters,
- "orders": self._orders,
- "limit": self._limit,
- "offset": self._offset,
- "all_descendants": self._all_descendants,
- }
- if start:
- query_kwargs["start_at"] = cursor_pair
- query_kwargs["end_at"] = self._end_at
- else:
- query_kwargs["start_at"] = self._start_at
- query_kwargs["end_at"] = cursor_pair
-
- return self.__class__(self._parent, **query_kwargs)
-
- def start_at(self, document_fields):
- """Start query results at a particular document value.
-
- The result set will **include** the document specified by
- ``document_fields``.
-
- If the current query already has specified a start cursor -- either
- via this method or
- :meth:`~google.cloud.firestore_v1.query.Query.start_after` -- this
- will overwrite it.
-
- When the query is sent to the server, the ``document_fields`` will
- be used in the order given by fields set by
- :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
-
- Args:
- document_fields
- (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
- a document snapshot or a dictionary/list/tuple of fields
- representing a query results cursor. A cursor is a collection
- of values that represent a position in a query result set.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A query with cursor. Acts as
- a copy of the current query, modified with the newly added
- "start at" cursor.
- """
- return self._cursor_helper(document_fields, before=True, start=True)
-
- def start_after(self, document_fields):
- """Start query results after a particular document value.
-
- The result set will **exclude** the document specified by
- ``document_fields``.
-
- If the current query already has specified a start cursor -- either
- via this method or
- :meth:`~google.cloud.firestore_v1.query.Query.start_at` -- this will
- overwrite it.
-
- When the query is sent to the server, the ``document_fields`` will
- be used in the order given by fields set by
- :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
-
- Args:
- document_fields
- (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
- a document snapshot or a dictionary/list/tuple of fields
- representing a query results cursor. A cursor is a collection
- of values that represent a position in a query result set.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A query with cursor. Acts as a copy of the current query, modified
- with the newly added "start after" cursor.
- """
- return self._cursor_helper(document_fields, before=False, start=True)
-
- def end_before(self, document_fields):
- """End query results before a particular document value.
-
- The result set will **exclude** the document specified by
- ``document_fields``.
-
- If the current query already has specified an end cursor -- either
- via this method or
- :meth:`~google.cloud.firestore_v1.query.Query.end_at` -- this will
- overwrite it.
-
- When the query is sent to the server, the ``document_fields`` will
- be used in the order given by fields set by
- :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
-
- Args:
- document_fields
- (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
- a document snapshot or a dictionary/list/tuple of fields
- representing a query results cursor. A cursor is a collection
- of values that represent a position in a query result set.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A query with cursor. Acts as a copy of the current query, modified
- with the newly added "end before" cursor.
- """
- return self._cursor_helper(document_fields, before=True, start=False)
-
- def end_at(self, document_fields):
- """End query results at a particular document value.
-
- The result set will **include** the document specified by
- ``document_fields``.
-
- If the current query already has specified an end cursor -- either
- via this method or
- :meth:`~google.cloud.firestore_v1.query.Query.end_before` -- this will
- overwrite it.
+ def get(
+ self,
+ transaction=None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> list:
+ """Read the documents in the collection that match this query.
- When the query is sent to the server, the ``document_fields`` will
- be used in the order given by fields set by
- :meth:`~google.cloud.firestore_v1.query.Query.order_by`.
+ This sends a ``RunQuery`` RPC and returns a list of documents
+ returned in the stream of ``RunQueryResponse`` messages.
Args:
- document_fields
- (Union[:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`, dict, list, tuple]):
- a document snapshot or a dictionary/list/tuple of fields
- representing a query results cursor. A cursor is a collection
- of values that represent a position in a query result set.
-
- Returns:
- :class:`~google.cloud.firestore_v1.query.Query`:
- A query with cursor. Acts as a copy of the current query, modified
- with the newly added "end at" cursor.
- """
- return self._cursor_helper(document_fields, before=False, start=False)
-
- def _filters_pb(self):
- """Convert all the filters into a single generic Filter protobuf.
-
- This may be a lone field filter or unary filter, may be a composite
- filter or may be :data:`None`.
-
- Returns:
- :class:`google.cloud.firestore_v1.types.StructuredQuery.Filter`:
- A "generic" filter representing the current query's filters.
- """
- num_filters = len(self._field_filters)
- if num_filters == 0:
- return None
- elif num_filters == 1:
- return _filter_pb(self._field_filters[0])
- else:
- composite_filter = query_pb2.StructuredQuery.CompositeFilter(
- op=enums.StructuredQuery.CompositeFilter.Operator.AND,
- filters=[_filter_pb(filter_) for filter_ in self._field_filters],
- )
- return query_pb2.StructuredQuery.Filter(composite_filter=composite_filter)
-
- @staticmethod
- def _normalize_projection(projection):
- """Helper: convert field paths to message."""
- if projection is not None:
-
- fields = list(projection.fields)
-
- if not fields:
- field_ref = query_pb2.StructuredQuery.FieldReference(
- field_path="__name__"
- )
- return query_pb2.StructuredQuery.Projection(fields=[field_ref])
-
- return projection
-
- def _normalize_orders(self):
- """Helper: adjust orders based on cursors, where clauses."""
- orders = list(self._orders)
- _has_snapshot_cursor = False
-
- if self._start_at:
- if isinstance(self._start_at[0], document.DocumentSnapshot):
- _has_snapshot_cursor = True
-
- if self._end_at:
- if isinstance(self._end_at[0], document.DocumentSnapshot):
- _has_snapshot_cursor = True
-
- if _has_snapshot_cursor:
- should_order = [
- _enum_from_op_string(key)
- for key in _COMPARISON_OPERATORS
- if key not in (_EQ_OP, "array_contains")
- ]
- order_keys = [order.field.field_path for order in orders]
- for filter_ in self._field_filters:
- field = filter_.field.field_path
- if filter_.op in should_order and field not in order_keys:
- orders.append(self._make_order(field, "ASCENDING"))
- if not orders:
- orders.append(self._make_order("__name__", "ASCENDING"))
- else:
- order_keys = [order.field.field_path for order in orders]
- if "__name__" not in order_keys:
- direction = orders[-1].direction # enum?
- orders.append(self._make_order("__name__", direction))
-
- return orders
-
- def _normalize_cursor(self, cursor, orders):
- """Helper: convert cursor to a list of values based on orders."""
- if cursor is None:
- return
-
- if not orders:
- raise ValueError(_NO_ORDERS_FOR_CURSOR)
-
- document_fields, before = cursor
-
- order_keys = [order.field.field_path for order in orders]
-
- if isinstance(document_fields, document.DocumentSnapshot):
- snapshot = document_fields
- document_fields = snapshot.to_dict()
- document_fields["__name__"] = snapshot.reference
-
- if isinstance(document_fields, dict):
- # Transform to list using orders
- values = []
- data = document_fields
- for order_key in order_keys:
- try:
- if order_key in data:
- values.append(data[order_key])
- else:
- values.append(
- field_path_module.get_nested_value(order_key, data)
- )
- except KeyError:
- msg = _MISSING_ORDER_BY.format(order_key, data)
- raise ValueError(msg)
- document_fields = values
-
- if len(document_fields) != len(orders):
- msg = _MISMATCH_CURSOR_W_ORDER_BY.format(document_fields, order_keys)
- raise ValueError(msg)
-
- _transform_bases = (transforms.Sentinel, transforms._ValueList)
-
- for index, key_field in enumerate(zip(order_keys, document_fields)):
- key, field = key_field
-
- if isinstance(field, _transform_bases):
- msg = _INVALID_CURSOR_TRANSFORM
- raise ValueError(msg)
-
- if key == "__name__" and isinstance(field, six.string_types):
- document_fields[index] = self._parent.document(field)
-
- return document_fields, before
-
- def _to_protobuf(self):
- """Convert the current query into the equivalent protobuf.
+ transaction
+ (Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
+ An existing transaction that this query will run in.
+ If a ``transaction`` is used and it already has write operations
+ added, this method cannot be used (i.e. read-after-write is not
+ allowed).
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Returns:
- :class:`google.cloud.firestore_v1.types.StructuredQuery`:
- The query protobuf.
+ list: The documents in the collection that match this query.
"""
- projection = self._normalize_projection(self._projection)
- orders = self._normalize_orders()
- start_at = self._normalize_cursor(self._start_at, orders)
- end_at = self._normalize_cursor(self._end_at, orders)
-
- query_kwargs = {
- "select": projection,
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(
- collection_id=self._parent.id, all_descendants=self._all_descendants
+ is_limited_to_last = self._limit_to_last
+
+ if self._limit_to_last:
+ # In order to fetch up to `self._limit` results from the end of the
+ # query flip the defined ordering on the query to start from the
+ # end, retrieving up to `self._limit` results from the backend.
+ for order in self._orders:
+ order.direction = _enum_from_direction(
+ self.DESCENDING
+ if order.direction == self.ASCENDING
+ else self.ASCENDING
)
- ],
- "where": self._filters_pb(),
- "order_by": orders,
- "start_at": _cursor_pb(start_at),
- "end_at": _cursor_pb(end_at),
- }
- if self._offset is not None:
- query_kwargs["offset"] = self._offset
- if self._limit is not None:
- query_kwargs["limit"] = wrappers_pb2.Int32Value(value=self._limit)
+ self._limit_to_last = False
- return query_pb2.StructuredQuery(**query_kwargs)
+ result = self.stream(transaction=transaction, retry=retry, timeout=timeout)
+ if is_limited_to_last:
+ result = reversed(list(result))
- def get(self, transaction=None):
- """Deprecated alias for :meth:`stream`."""
- warnings.warn(
- "'Query.get' is deprecated: please use 'Query.stream' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return self.stream(transaction=transaction)
+ return list(result)
- def stream(self, transaction=None):
+ def stream(
+ self,
+ transaction=None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Generator[document.DocumentSnapshot, Any, None]:
"""Read the documents in the collection that match this query.
This sends a ``RunQuery`` RPC and then returns an iterator which
@@ -759,17 +193,21 @@ def stream(self, transaction=None):
transaction
(Optional[:class:`~google.cloud.firestore_v1.transaction.Transaction`]):
An existing transaction that this query will run in.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Yields:
:class:`~google.cloud.firestore_v1.document.DocumentSnapshot`:
The next document that fulfills the query.
"""
- parent_path, expected_prefix = self._parent._parent_info()
+ request, expected_prefix, kwargs = self._prep_stream(
+ transaction, retry, timeout,
+ )
+
response_iterator = self._client._firestore_api.run_query(
- parent_path,
- self._to_protobuf(),
- transaction=_helpers.get_transaction_id(transaction),
- metadata=self._client._rpc_metadata,
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
)
for response in response_iterator:
@@ -784,7 +222,7 @@ def stream(self, transaction=None):
if snapshot is not None:
yield snapshot
- def on_snapshot(self, callback):
+ def on_snapshot(self, callback: Callable) -> Watch:
"""Monitor the documents in this collection that match this query.
This starts a watch on this query using a background thread. The
@@ -817,225 +255,80 @@ def on_snapshot(docs, changes, read_time):
self, callback, document.DocumentSnapshot, document.DocumentReference
)
- def _comparator(self, doc1, doc2):
- _orders = self._orders
-
- # Add implicit sorting by name, using the last specified direction.
- if len(_orders) == 0:
- lastDirection = Query.ASCENDING
- else:
- if _orders[-1].direction == 1:
- lastDirection = Query.ASCENDING
- else:
- lastDirection = Query.DESCENDING
-
- orderBys = list(_orders)
-
- order_pb = query_pb2.StructuredQuery.Order(
- field=query_pb2.StructuredQuery.FieldReference(field_path="id"),
- direction=_enum_from_direction(lastDirection),
- )
- orderBys.append(order_pb)
-
- for orderBy in orderBys:
- if orderBy.field.field_path == "id":
- # If ordering by docuent id, compare resource paths.
- comp = Order()._compare_to(doc1.reference._path, doc2.reference._path)
- else:
- if (
- orderBy.field.field_path not in doc1._data
- or orderBy.field.field_path not in doc2._data
- ):
- raise ValueError(
- "Can only compare fields that exist in the "
- "DocumentSnapshot. Please include the fields you are "
- "ordering on in your select() call."
- )
- v1 = doc1._data[orderBy.field.field_path]
- v2 = doc2._data[orderBy.field.field_path]
- encoded_v1 = _helpers.encode_value(v1)
- encoded_v2 = _helpers.encode_value(v2)
- comp = Order().compare(encoded_v1, encoded_v2)
-
- if comp != 0:
- # 1 == Ascending, -1 == Descending
- return orderBy.direction * comp
-
- return 0
-
-
-def _enum_from_op_string(op_string):
- """Convert a string representation of a binary operator to an enum.
-
- These enums come from the protobuf message definition
- ``StructuredQuery.FieldFilter.Operator``.
-
- Args:
- op_string (str): A comparison operation in the form of a string.
- Acceptable values are ``<``, ``<=``, ``==``, ``>=``
- and ``>``.
-
- Returns:
- int: The enum corresponding to ``op_string``.
-
- Raises:
- ValueError: If ``op_string`` is not a valid operator.
- """
- try:
- return _COMPARISON_OPERATORS[op_string]
- except KeyError:
- choices = ", ".join(sorted(_COMPARISON_OPERATORS.keys()))
- msg = _BAD_OP_STRING.format(op_string, choices)
- raise ValueError(msg)
-
-
-def _isnan(value):
- """Check if a value is NaN.
-
- This differs from ``math.isnan`` in that **any** input type is
- allowed.
-
- Args:
- value (Any): A value to check for NaN-ness.
-
- Returns:
- bool: Indicates if the value is the NaN float.
- """
- if isinstance(value, float):
- return math.isnan(value)
- else:
- return False
-
-
-def _enum_from_direction(direction):
- """Convert a string representation of a direction to an enum.
-
- Args:
- direction (str): A direction to order by. Must be one of
- :attr:`~google.cloud.firestore.Query.ASCENDING` or
- :attr:`~google.cloud.firestore.Query.DESCENDING`.
-
- Returns:
- int: The enum corresponding to ``direction``.
-
- Raises:
- ValueError: If ``direction`` is not a valid direction.
- """
- if isinstance(direction, int):
- return direction
-
- if direction == Query.ASCENDING:
- return enums.StructuredQuery.Direction.ASCENDING
- elif direction == Query.DESCENDING:
- return enums.StructuredQuery.Direction.DESCENDING
- else:
- msg = _BAD_DIR_STRING.format(direction, Query.ASCENDING, Query.DESCENDING)
- raise ValueError(msg)
-
-
-def _filter_pb(field_or_unary):
- """Convert a specific protobuf filter to the generic filter type.
-
- Args:
- field_or_unary (Union[google.cloud.proto.firestore.v1.\
- query_pb2.StructuredQuery.FieldFilter, google.cloud.proto.\
- firestore.v1.query_pb2.StructuredQuery.FieldFilter]): A
- field or unary filter to convert to a generic filter.
-
- Returns:
- google.cloud.firestore_v1.types.\
- StructuredQuery.Filter: A "generic" filter.
-
- Raises:
- ValueError: If ``field_or_unary`` is not a field or unary filter.
- """
- if isinstance(field_or_unary, query_pb2.StructuredQuery.FieldFilter):
- return query_pb2.StructuredQuery.Filter(field_filter=field_or_unary)
- elif isinstance(field_or_unary, query_pb2.StructuredQuery.UnaryFilter):
- return query_pb2.StructuredQuery.Filter(unary_filter=field_or_unary)
- else:
- raise ValueError("Unexpected filter type", type(field_or_unary), field_or_unary)
+class CollectionGroup(Query, BaseCollectionGroup):
+ """Represents a Collection Group in the Firestore API.
-def _cursor_pb(cursor_pair):
- """Convert a cursor pair to a protobuf.
-
- If ``cursor_pair`` is :data:`None`, just returns :data:`None`.
+ This is a specialization of :class:`.Query` that includes all documents in the
+ database that are contained in a collection or subcollection of the given
+ parent.
Args:
- cursor_pair (Optional[Tuple[list, bool]]): Two-tuple of
-
- * a list of field values.
- * a ``before`` flag
-
- Returns:
- Optional[google.cloud.firestore_v1.types.Cursor]: A
- protobuf cursor corresponding to the values.
+ parent (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
+ The collection that this query applies to.
"""
- if cursor_pair is not None:
- data, before = cursor_pair
- value_pbs = [_helpers.encode_value(value) for value in data]
- return query_pb2.Cursor(values=value_pbs, before=before)
-
-def _query_response_to_snapshot(response_pb, collection, expected_prefix):
- """Parse a query response protobuf to a document snapshot.
+ def __init__(
+ self,
+ parent,
+ projection=None,
+ field_filters=(),
+ orders=(),
+ limit=None,
+ limit_to_last=False,
+ offset=None,
+ start_at=None,
+ end_at=None,
+ all_descendants=True,
+ ) -> None:
+ super(CollectionGroup, self).__init__(
+ parent=parent,
+ projection=projection,
+ field_filters=field_filters,
+ orders=orders,
+ limit=limit,
+ limit_to_last=limit_to_last,
+ offset=offset,
+ start_at=start_at,
+ end_at=end_at,
+ all_descendants=all_descendants,
+ )
- Args:
- response_pb (google.cloud.proto.firestore.v1.\
- firestore_pb2.RunQueryResponse): A
- collection (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
- A reference to the collection that initiated the query.
- expected_prefix (str): The expected prefix for fully-qualified
- document names returned in the query results. This can be computed
- directly from ``collection`` via :meth:`_parent_info`.
+ @staticmethod
+ def _get_query_class():
+ return Query
- Returns:
- Optional[:class:`~google.cloud.firestore.document.DocumentSnapshot`]:
- A snapshot of the data returned in the query. If
- ``response_pb.document`` is not set, the snapshot will be :data:`None`.
- """
- if not response_pb.HasField("document"):
- return None
+ def get_partitions(
+ self,
+ partition_count,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Generator[QueryPartition, None, None]:
+ """Partition a query for parallelization.
- document_id = _helpers.get_doc_id(response_pb.document, expected_prefix)
- reference = collection.document(document_id)
- data = _helpers.decode_dict(response_pb.document.fields, collection._client)
- snapshot = document.DocumentSnapshot(
- reference,
- data,
- exists=True,
- read_time=response_pb.read_time,
- create_time=response_pb.document.create_time,
- update_time=response_pb.document.update_time,
- )
- return snapshot
+ Partitions a query by returning partition cursors that can be used to run the
+ query in parallel. The returned partition cursors are split points that can be
+ used as starting/end points for the query results.
+ Args:
+ partition_count (int): The desired maximum number of partition points. The
+ number must be strictly positive. The actual number of partitions
+ returned may be fewer.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+ """
+ request, kwargs = self._prep_get_partitions(partition_count, retry, timeout)
-def _collection_group_query_response_to_snapshot(response_pb, collection):
- """Parse a query response protobuf to a document snapshot.
+ pager = self._client._firestore_api.partition_query(
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
+ )
- Args:
- response_pb (google.cloud.proto.firestore.v1.\
- firestore_pb2.RunQueryResponse): A
- collection (:class:`~google.cloud.firestore_v1.collection.CollectionReference`):
- A reference to the collection that initiated the query.
+ start_at = None
+ for cursor_pb in pager:
+ cursor = self._client.document(cursor_pb.values[0].reference_value)
+ yield QueryPartition(self, start_at, cursor)
+ start_at = cursor
- Returns:
- Optional[:class:`~google.cloud.firestore.document.DocumentSnapshot`]:
- A snapshot of the data returned in the query. If
- ``response_pb.document`` is not set, the snapshot will be :data:`None`.
- """
- if not response_pb.HasField("document"):
- return None
- reference = collection._client.document(response_pb.document.name)
- data = _helpers.decode_dict(response_pb.document.fields, collection._client)
- snapshot = document.DocumentSnapshot(
- reference,
- data,
- exists=True,
- read_time=response_pb.read_time,
- create_time=response_pb.document.create_time,
- update_time=response_pb.document.update_time,
- )
- return snapshot
+ yield QueryPartition(self, start_at, None)
diff --git a/google/cloud/firestore_v1/services/__init__.py b/google/cloud/firestore_v1/services/__init__.py
new file mode 100644
index 0000000000..42ffdf2bc4
--- /dev/null
+++ b/google/cloud/firestore_v1/services/__init__.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google/cloud/firestore_v1/services/firestore/__init__.py b/google/cloud/firestore_v1/services/firestore/__init__.py
new file mode 100644
index 0000000000..14099c8671
--- /dev/null
+++ b/google/cloud/firestore_v1/services/firestore/__init__.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .client import FirestoreClient
+from .async_client import FirestoreAsyncClient
+
+__all__ = (
+ "FirestoreClient",
+ "FirestoreAsyncClient",
+)
diff --git a/google/cloud/firestore_v1/services/firestore/async_client.py b/google/cloud/firestore_v1/services/firestore/async_client.py
new file mode 100644
index 0000000000..3c00be1bfb
--- /dev/null
+++ b/google/cloud/firestore_v1/services/firestore/async_client.py
@@ -0,0 +1,1206 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, AsyncIterable, AsyncIterator, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.firestore_v1.services.firestore import pagers
+from google.cloud.firestore_v1.types import common
+from google.cloud.firestore_v1.types import document
+from google.cloud.firestore_v1.types import document as gf_document
+from google.cloud.firestore_v1.types import firestore
+from google.cloud.firestore_v1.types import query
+from google.cloud.firestore_v1.types import write as gf_write
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+from google.rpc import status_pb2 as status # type: ignore
+
+from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport
+from .client import FirestoreClient
+
+
+class FirestoreAsyncClient:
+ """The Cloud Firestore service.
+ Cloud Firestore is a fast, fully managed, serverless, cloud-
+ native NoSQL document database that simplifies storing, syncing,
+ and querying data for your mobile, web, and IoT apps at global
+ scale. Its client libraries provide live synchronization and
+ offline support, while its security features and integrations
+ with Firebase and Google Cloud Platform (GCP) accelerate
+ building truly serverless apps.
+ """
+
+ _client: FirestoreClient
+
+ DEFAULT_ENDPOINT = FirestoreClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = FirestoreClient.DEFAULT_MTLS_ENDPOINT
+
+ from_service_account_file = FirestoreClient.from_service_account_file
+ from_service_account_json = from_service_account_file
+
+ get_transport_class = functools.partial(
+ type(FirestoreClient).get_transport_class, type(FirestoreClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: credentials.Credentials = None,
+ transport: Union[str, FirestoreTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the firestore client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.FirestoreTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ self._client = FirestoreClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def get_document(
+ self,
+ request: firestore.GetDocumentRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> document.Document:
+ r"""Gets a single document.
+
+ Args:
+ request (:class:`~.firestore.GetDocumentRequest`):
+ The request object. The request for
+ [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.document.Document:
+ A Firestore document.
+ Must not exceed 1 MiB - 4 bytes.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ request = firestore.GetDocumentRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_document,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def list_documents(
+ self,
+ request: firestore.ListDocumentsRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListDocumentsAsyncPager:
+ r"""Lists documents.
+
+ Args:
+ request (:class:`~.firestore.ListDocumentsRequest`):
+ The request object. The request for
+ [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListDocumentsAsyncPager:
+ The response for
+ [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ request = firestore.ListDocumentsRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_documents,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListDocumentsAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def update_document(
+ self,
+ request: firestore.UpdateDocumentRequest = None,
+ *,
+ document: gf_document.Document = None,
+ update_mask: common.DocumentMask = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> gf_document.Document:
+ r"""Updates or inserts a document.
+
+ Args:
+ request (:class:`~.firestore.UpdateDocumentRequest`):
+ The request object. The request for
+ [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument].
+ document (:class:`~.gf_document.Document`):
+ Required. The updated document.
+ Creates the document if it does not
+ already exist.
+ This corresponds to the ``document`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (:class:`~.common.DocumentMask`):
+ The fields to update.
+ None of the field paths in the mask may
+ contain a reserved name.
+ If the document exists on the server and
+ has fields not referenced in the mask,
+ they are left unchanged.
+ Fields referenced in the mask, but not
+ present in the input document, are
+ deleted from the document on the server.
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.gf_document.Document:
+ A Firestore document.
+ Must not exceed 1 MiB - 4 bytes.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([document, update_mask]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore.UpdateDocumentRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if document is not None:
+ request.document = document
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.update_document,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("document.name", request.document.name),)
+ ),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def delete_document(
+ self,
+ request: firestore.DeleteDocumentRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a document.
+
+ Args:
+ request (:class:`~.firestore.DeleteDocumentRequest`):
+ The request object. The request for
+ [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument].
+ name (:class:`str`):
+ Required. The resource name of the Document to delete.
+ In the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore.DeleteDocumentRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_document,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+ def batch_get_documents(
+ self,
+ request: firestore.BatchGetDocumentsRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> AsyncIterable[firestore.BatchGetDocumentsResponse]:
+ r"""Gets multiple documents.
+ Documents returned by this method are not guaranteed to
+ be returned in the same order that they were requested.
+
+ Args:
+ request (:class:`~.firestore.BatchGetDocumentsRequest`):
+ The request object. The request for
+ [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ AsyncIterable[~.firestore.BatchGetDocumentsResponse]:
+ The streamed response for
+ [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].
+
+ """
+ # Create or coerce a protobuf request object.
+
+ request = firestore.BatchGetDocumentsRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.batch_get_documents,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=300.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def begin_transaction(
+ self,
+ request: firestore.BeginTransactionRequest = None,
+ *,
+ database: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> firestore.BeginTransactionResponse:
+ r"""Starts a new transaction.
+
+ Args:
+ request (:class:`~.firestore.BeginTransactionRequest`):
+ The request object. The request for
+ [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction].
+ database (:class:`str`):
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ This corresponds to the ``database`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.firestore.BeginTransactionResponse:
+ The response for
+ [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction].
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([database]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = firestore.BeginTransactionRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if database is not None:
+ request.database = database
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.begin_transaction,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
    async def commit(
        self,
        request: firestore.CommitRequest = None,
        *,
        database: str = None,
        writes: Sequence[gf_write.Write] = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> firestore.CommitResponse:
        r"""Commits a transaction, while optionally updating
        documents.

        Args:
            request (:class:`~.firestore.CommitRequest`):
                The request object. The request for
                [Firestore.Commit][google.firestore.v1.Firestore.Commit].
            database (:class:`str`):
                Required. The database name. In the format:
                ``projects/{project_id}/databases/{database_id}``.
                This corresponds to the ``database`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            writes (:class:`Sequence[~.gf_write.Write]`):
                The writes to apply.
                Always executed atomically and in order.
                This corresponds to the ``writes`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.firestore.CommitResponse:
                The response for
                [Firestore.Commit][google.firestore.v1.Firestore.Commit].

        Raises:
            ValueError: If ``request`` is provided together with any of the
                flattened fields (``database``, ``writes``).
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        if request is not None and any([database, writes]):
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        request = firestore.CommitRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.

        if database is not None:
            request.database = database
        if writes is not None:
            request.writes = writes

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # NOTE(review): unlike the read RPCs in this client, only UNAVAILABLE
        # is in the retry predicate here — presumably because Commit is not
        # safe to retry after a deadline; confirm against the service config.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.commit,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.  Routing by `database` lets the backend dispatch
        # the request without parsing the payload.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)),
        )

        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    async def rollback(
        self,
        request: firestore.RollbackRequest = None,
        *,
        database: str = None,
        transaction: bytes = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> None:
        r"""Rolls back a transaction.

        Args:
            request (:class:`~.firestore.RollbackRequest`):
                The request object. The request for
                [Firestore.Rollback][google.firestore.v1.Firestore.Rollback].
            database (:class:`str`):
                Required. The database name. In the format:
                ``projects/{project_id}/databases/{database_id}``.
                This corresponds to the ``database`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            transaction (:class:`bytes`):
                Required. The transaction to roll
                back.
                This corresponds to the ``transaction`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Raises:
            ValueError: If ``request`` is provided together with any of the
                flattened fields (``database``, ``transaction``).
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        if request is not None and any([database, transaction]):
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        request = firestore.RollbackRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.

        if database is not None:
            request.database = database
        if transaction is not None:
            request.transaction = transaction

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  Rollback is idempotent, so the
        # predicate also retries DEADLINE_EXCEEDED and INTERNAL.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.rollback,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    exceptions.DeadlineExceeded,
                    exceptions.InternalServerError,
                    exceptions.ServiceUnavailable,
                ),
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)),
        )

        # Send the request.  Rollback returns google.protobuf.Empty, so there
        # is nothing to hand back to the caller.
        await rpc(
            request, retry=retry, timeout=timeout, metadata=metadata,
        )
+
    def run_query(
        self,
        request: firestore.RunQueryRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> AsyncIterable[firestore.RunQueryResponse]:
        r"""Runs a query.

        Args:
            request (:class:`~.firestore.RunQueryRequest`):
                The request object. The request for
                [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            AsyncIterable[~.firestore.RunQueryResponse]:
                The response for
                [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery].

        """
        # NOTE: deliberately `def`, not `async def` — this is a
        # server-streaming RPC, so the wrapped call below returns the
        # stream object directly rather than being awaited here.
        # Create or coerce a protobuf request object.

        request = firestore.RunQueryRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.run_query,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    exceptions.DeadlineExceeded,
                    exceptions.InternalServerError,
                    exceptions.ServiceUnavailable,
                ),
            ),
            default_timeout=300.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    async def partition_query(
        self,
        request: firestore.PartitionQueryRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.PartitionQueryAsyncPager:
        r"""Partitions a query by returning partition cursors
        that can be used to run the query in parallel. The
        returned partition cursors are split points that can be
        used by RunQuery as starting/end points for the query
        results.

        Args:
            request (:class:`~.firestore.PartitionQueryRequest`):
                The request object. The request for
                [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.pagers.PartitionQueryAsyncPager:
                The response for
                [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.

        request = firestore.PartitionQueryRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.partition_query,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    exceptions.DeadlineExceeded,
                    exceptions.InternalServerError,
                    exceptions.ServiceUnavailable,
                ),
            ),
            default_timeout=300.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.  The pager re-invokes `rpc`
        # with updated page tokens as the caller iterates.
        response = pagers.PartitionQueryAsyncPager(
            method=rpc, request=request, response=response, metadata=metadata,
        )

        # Done; return the response.
        return response
+
    def write(
        self,
        requests: AsyncIterator[firestore.WriteRequest] = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> AsyncIterable[firestore.WriteResponse]:
        r"""Streams batches of document updates and deletes, in
        order.

        Args:
            requests (AsyncIterator[`~.firestore.WriteRequest`]):
                The request object AsyncIterator. The request for
                [Firestore.Write][google.firestore.v1.Firestore.Write].
                The first request creates a stream, or resumes an
                existing one from a token.
                When creating a new stream, the server replies with a
                response containing only an ID and a token, to use in
                the next request.

                When resuming a stream, the server first streams any
                responses later than the given token, then a response
                containing only an up-to-date token, to use in the next
                request.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            AsyncIterable[~.firestore.WriteResponse]:
                The response for
                [Firestore.Write][google.firestore.v1.Firestore.Write].

        """
        # NOTE: bidirectional streaming RPC — `def`, not `async def`; the
        # wrapped call returns the stream object.  No default retry is
        # configured: resumption is handled via the stream token protocol
        # described above, not by transparent replay.

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  The 86400s (24h) default timeout
        # reflects the long-lived nature of the stream.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.write,
            default_timeout=86400.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.  There is no routing field on a streaming
        # request, hence the empty routing header.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),)

        # Send the request.
        response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    def listen(
        self,
        requests: AsyncIterator[firestore.ListenRequest] = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> AsyncIterable[firestore.ListenResponse]:
        r"""Listens to changes.

        Args:
            requests (AsyncIterator[`~.firestore.ListenRequest`]):
                The request object AsyncIterator. A request for
                [Firestore.Listen][google.firestore.v1.Firestore.Listen]
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            AsyncIterable[~.firestore.ListenResponse]:
                The response for
                [Firestore.Listen][google.firestore.v1.Firestore.Listen].

        """
        # NOTE: bidirectional streaming RPC — `def`, not `async def`; the
        # wrapped call returns the stream object directly.

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  24h default timeout for the
        # long-lived watch stream.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.listen,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    exceptions.DeadlineExceeded,
                    exceptions.InternalServerError,
                    exceptions.ServiceUnavailable,
                ),
            ),
            default_timeout=86400.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.  No routing field is available on a streaming
        # request, hence the empty routing header.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),)

        # Send the request.
        response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    async def list_collection_ids(
        self,
        request: firestore.ListCollectionIdsRequest = None,
        *,
        parent: str = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListCollectionIdsAsyncPager:
        r"""Lists all the collection IDs underneath a document.

        Args:
            request (:class:`~.firestore.ListCollectionIdsRequest`):
                The request object. The request for
                [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].
            parent (:class:`str`):
                Required. The parent document. In the format:
                ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
                For example:
                ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.pagers.ListCollectionIdsAsyncPager:
                The response from
                [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        Raises:
            ValueError: If both ``request`` and the flattened ``parent``
                field are provided.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        if request is not None and any([parent]):
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        request = firestore.ListCollectionIdsRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.

        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_collection_ids,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    exceptions.DeadlineExceeded,
                    exceptions.InternalServerError,
                    exceptions.ServiceUnavailable,
                ),
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.
        response = pagers.ListCollectionIdsAsyncPager(
            method=rpc, request=request, response=response, metadata=metadata,
        )

        # Done; return the response.
        return response
+
    async def batch_write(
        self,
        request: firestore.BatchWriteRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> firestore.BatchWriteResponse:
        r"""Applies a batch of write operations.

        The BatchWrite method does not apply the write operations
        atomically and can apply them out of order. Method does not
        allow more than one write per document. Each write succeeds or
        fails independently. See the
        [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for
        the success status of each write.

        If you require an atomically applied set of writes, use
        [Commit][google.firestore.v1.Firestore.Commit] instead.

        Args:
            request (:class:`~.firestore.BatchWriteRequest`):
                The request object. The request for
                [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.firestore.BatchWriteResponse:
                The response from
                [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].

        """
        # Create or coerce a protobuf request object.

        request = firestore.BatchWriteRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        # NOTE(review): ABORTED is retried here (unlike the other write
        # RPCs) — presumably for contention on individual documents;
        # confirm against the service's retry config.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.batch_write,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    exceptions.Aborted, exceptions.ServiceUnavailable,
                ),
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)),
        )

        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    async def create_document(
        self,
        request: firestore.CreateDocumentRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> document.Document:
        r"""Creates a new document.

        Args:
            request (:class:`~.firestore.CreateDocumentRequest`):
                The request object. The request for
                [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.document.Document:
                A Firestore document.
                Must not exceed 1 MiB - 4 bytes.

        """
        # Create or coerce a protobuf request object.

        request = firestore.CreateDocumentRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  Only UNAVAILABLE is retried:
        # a replayed create could otherwise fail with ALREADY_EXISTS.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_document,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
+
# Resolve the gapic_version for the x-goog-api-client header.  This is
# deliberately defined *after* the client class: the methods above only
# read DEFAULT_CLIENT_INFO at call time (as a keyword default inside the
# method body), so module-level ordering is safe.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-firestore",).version,
    )
except pkg_resources.DistributionNotFound:
    # Package metadata unavailable (e.g. running from a source checkout);
    # fall back to a ClientInfo with no gapic_version.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = ("FirestoreAsyncClient",)
diff --git a/google/cloud/firestore_v1/services/firestore/client.py b/google/cloud/firestore_v1/services/firestore/client.py
new file mode 100644
index 0000000000..527ba3c6ad
--- /dev/null
+++ b/google/cloud/firestore_v1/services/firestore/client.py
@@ -0,0 +1,1247 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import (
+ Callable,
+ Dict,
+ Optional,
+ Iterable,
+ Iterator,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+)
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.firestore_v1.services.firestore import pagers
+from google.cloud.firestore_v1.types import common
+from google.cloud.firestore_v1.types import document
+from google.cloud.firestore_v1.types import document as gf_document
+from google.cloud.firestore_v1.types import firestore
+from google.cloud.firestore_v1.types import query
+from google.cloud.firestore_v1.types import write as gf_write
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+from google.rpc import status_pb2 as status # type: ignore
+
+from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import FirestoreGrpcTransport
+from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport
+
+
class FirestoreClientMeta(type):
    """Metaclass for the Firestore client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """

    # Registry of supported transports, keyed by label.  An OrderedDict is
    # used so that iteration order is the declaration order: the *first*
    # entry ("grpc") doubles as the default transport.
    _transport_registry = OrderedDict()  # type: Dict[str, Type[FirestoreTransport]]
    _transport_registry["grpc"] = FirestoreGrpcTransport
    _transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport

    def get_transport_class(cls, label: str = None,) -> Type[FirestoreTransport]:
        """Return an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.

        Raises:
            KeyError: If ``label`` is not a registered transport name.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))
+
+
+class FirestoreClient(metaclass=FirestoreClientMeta):
+ """The Cloud Firestore service.
+ Cloud Firestore is a fast, fully managed, serverless, cloud-
+ native NoSQL document database that simplifies storing, syncing,
+ and querying data for your mobile, web, and IoT apps at global
+ scale. Its client libraries provide live synchronization and
+ offline support, while its security features and integrations
+ with Firebase and Google Cloud Platform (GCP) accelerate
+ building truly serverless apps.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "firestore.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ {@api.name}: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
    def __init__(
        self,
        *,
        credentials: Optional[credentials.Credentials] = None,
        transport: Union[str, FirestoreTransport, None] = None,
        client_options: Optional[client_options_lib.ClientOptions] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiate the firestore client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, ~.FirestoreTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (client_options_lib.ClientOptions): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
            ValueError: If a transport *instance* is supplied together with
                credentials, a credentials file, or scopes.
        """
        # Normalize client_options: accept a dict, an instance, or None.
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()

        # Create SSL credentials for mutual TLS if needed.
        # strtobool accepts "true"/"1"/"yes" etc.; anything else raises.
        use_client_cert = bool(
            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
        )

        ssl_credentials = None
        is_mtls = False
        if use_client_cert:
            if client_options.client_cert_source:
                # Imported lazily so grpc is only required when a client
                # certificate is actually configured.
                import grpc  # type: ignore

                cert, key = client_options.client_cert_source()
                ssl_credentials = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )
                is_mtls = True
            else:
                # Fall back to the device's default client certificate, if any.
                creds = SslCredentials()
                is_mtls = creds.is_mtls
                ssl_credentials = creds.ssl_credentials if is_mtls else None

        # Figure out which api endpoint to use.  An explicit api_endpoint in
        # client_options always wins over the environment variable.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        else:
            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
            if use_mtls_env == "never":
                api_endpoint = self.DEFAULT_ENDPOINT
            elif use_mtls_env == "always":
                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
            elif use_mtls_env == "auto":
                api_endpoint = (
                    self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
                )
            else:
                raise MutualTLSChannelError(
                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
                )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, FirestoreTransport):
            # transport is a FirestoreTransport instance.  A pre-built
            # transport already carries its own credentials/scopes, so
            # passing them here again is ambiguous and rejected.
            if credentials or client_options.credentials_file:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its scopes directly."
                )
            self._transport = transport
        else:
            # `transport` is a label (or None); resolve it through the
            # metaclass registry and build the transport here.
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                ssl_channel_credentials=ssl_credentials,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
            )
+
    def get_document(
        self,
        request: firestore.GetDocumentRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> document.Document:
        r"""Gets a single document.

        Args:
            request (:class:`~.firestore.GetDocumentRequest`):
                The request object. The request for
                [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.document.Document:
                A Firestore document.
                Must not exceed 1 MiB - 4 bytes.

        """
        # Create or coerce a protobuf request object.

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.GetDocumentRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.GetDocumentRequest):
            request = firestore.GetDocumentRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  Unlike the async client, the sync
        # client pre-wraps its methods on the transport, so just look the
        # wrapped callable up.
        rpc = self._transport._wrapped_methods[self._transport.get_document]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    def list_documents(
        self,
        request: firestore.ListDocumentsRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListDocumentsPager:
        r"""Lists documents.

        Args:
            request (:class:`~.firestore.ListDocumentsRequest`):
                The request object. The request for
                [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.pagers.ListDocumentsPager:
                The response for
                [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.ListDocumentsRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.ListDocumentsRequest):
            request = firestore.ListDocumentsRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  The wrapped callable was built
        # once at transport construction time.
        rpc = self._transport._wrapped_methods[self._transport.list_documents]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method that fetches subsequent pages on
        # demand via `rpc`.
        response = pagers.ListDocumentsPager(
            method=rpc, request=request, response=response, metadata=metadata,
        )

        # Done; return the response.
        return response
+
    def update_document(
        self,
        request: firestore.UpdateDocumentRequest = None,
        *,
        document: gf_document.Document = None,
        update_mask: common.DocumentMask = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> gf_document.Document:
        r"""Updates or inserts a document.

        Args:
            request (:class:`~.firestore.UpdateDocumentRequest`):
                The request object. The request for
                [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument].
            document (:class:`~.gf_document.Document`):
                Required. The updated document.
                Creates the document if it does not
                already exist.
                This corresponds to the ``document`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (:class:`~.common.DocumentMask`):
                The fields to update.
                None of the field paths in the mask may
                contain a reserved name.
                If the document exists on the server and
                has fields not referenced in the mask,
                they are left unchanged.
                Fields referenced in the mask, but not
                present in the input document, are
                deleted from the document on the server.
                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.gf_document.Document:
                A Firestore document.
                Must not exceed 1 MiB - 4 bytes.

        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([document, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.UpdateDocumentRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.UpdateDocumentRequest):
            request = firestore.UpdateDocumentRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        # NOTE: only non-None flattened arguments are copied onto the request,
        # so explicit request fields are never clobbered with None.
        if document is not None:
            request.document = document
        if update_mask is not None:
            request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update_document]

        # Certain fields should be provided within the metadata header;
        # add these here.
        # The routing header is keyed on ``document.name``, so the request's
        # document must carry a resource name for server-side routing.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("document.name", request.document.name),)
            ),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    def delete_document(
        self,
        request: firestore.DeleteDocumentRequest = None,
        *,
        name: str = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> None:
        r"""Deletes a document.

        Args:
            request (:class:`~.firestore.DeleteDocumentRequest`):
                The request object. The request for
                [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument].
            name (:class:`str`):
                Required. The resource name of the Document to delete.
                In the format:
                ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.DeleteDocumentRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.DeleteDocumentRequest):
            request = firestore.DeleteDocumentRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete_document]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Send the request.
        # The delete RPC yields no payload, so nothing is returned here.
        rpc(
            request, retry=retry, timeout=timeout, metadata=metadata,
        )
+
    def batch_get_documents(
        self,
        request: firestore.BatchGetDocumentsRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> Iterable[firestore.BatchGetDocumentsResponse]:
        r"""Gets multiple documents.
        Documents returned by this method are not guaranteed to
        be returned in the same order that they were requested.

        Args:
            request (:class:`~.firestore.BatchGetDocumentsRequest`):
                The request object. The request for
                [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            Iterable[~.firestore.BatchGetDocumentsResponse]:
                The streamed response for
                [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].

        """
        # Create or coerce a protobuf request object.
        # No flattened fields are offered for this method, so the request is
        # coerced directly without the sanity check used elsewhere.

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.BatchGetDocumentsRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.BatchGetDocumentsRequest):
            request = firestore.BatchGetDocumentsRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.batch_get_documents]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)),
        )

        # Send the request.
        # This is a server-streaming RPC; ``response`` is an iterable of
        # BatchGetDocumentsResponse messages rather than a single message.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    def begin_transaction(
        self,
        request: firestore.BeginTransactionRequest = None,
        *,
        database: str = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> firestore.BeginTransactionResponse:
        r"""Starts a new transaction.

        Args:
            request (:class:`~.firestore.BeginTransactionRequest`):
                The request object. The request for
                [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction].
            database (:class:`str`):
                Required. The database name. In the format:
                ``projects/{project_id}/databases/{database_id}``.
                This corresponds to the ``database`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.firestore.BeginTransactionResponse:
                The response for
                [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction].

        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([database])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.BeginTransactionRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.BeginTransactionRequest):
            request = firestore.BeginTransactionRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if database is not None:
            request.database = database

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.begin_transaction]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    def commit(
        self,
        request: firestore.CommitRequest = None,
        *,
        database: str = None,
        writes: Sequence[gf_write.Write] = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> firestore.CommitResponse:
        r"""Commits a transaction, while optionally updating
        documents.

        Args:
            request (:class:`~.firestore.CommitRequest`):
                The request object. The request for
                [Firestore.Commit][google.firestore.v1.Firestore.Commit].
            database (:class:`str`):
                Required. The database name. In the format:
                ``projects/{project_id}/databases/{database_id}``.
                This corresponds to the ``database`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            writes (:class:`Sequence[~.gf_write.Write]`):
                The writes to apply.
                Always executed atomically and in order.
                This corresponds to the ``writes`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.firestore.CommitResponse:
                The response for
                [Firestore.Commit][google.firestore.v1.Firestore.Commit].

        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([database, writes])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.CommitRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.CommitRequest):
            request = firestore.CommitRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        # ``writes`` replaces the repeated field wholesale rather than
        # appending to it.
        if database is not None:
            request.database = database
        if writes is not None:
            request.writes = writes

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.commit]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    def rollback(
        self,
        request: firestore.RollbackRequest = None,
        *,
        database: str = None,
        transaction: bytes = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> None:
        r"""Rolls back a transaction.

        Args:
            request (:class:`~.firestore.RollbackRequest`):
                The request object. The request for
                [Firestore.Rollback][google.firestore.v1.Firestore.Rollback].
            database (:class:`str`):
                Required. The database name. In the format:
                ``projects/{project_id}/databases/{database_id}``.
                This corresponds to the ``database`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            transaction (:class:`bytes`):
                Required. The transaction to roll
                back.
                This corresponds to the ``transaction`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([database, transaction])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.RollbackRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.RollbackRequest):
            request = firestore.RollbackRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if database is not None:
            request.database = database
        if transaction is not None:
            request.transaction = transaction

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.rollback]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)),
        )

        # Send the request.
        # Rollback has no response payload, so nothing is returned.
        rpc(
            request, retry=retry, timeout=timeout, metadata=metadata,
        )
+
    def run_query(
        self,
        request: firestore.RunQueryRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> Iterable[firestore.RunQueryResponse]:
        r"""Runs a query.

        Args:
            request (:class:`~.firestore.RunQueryRequest`):
                The request object. The request for
                [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            Iterable[~.firestore.RunQueryResponse]:
                The response for
                [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery].

        """
        # Create or coerce a protobuf request object.
        # No flattened fields are offered for this method.

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.RunQueryRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.RunQueryRequest):
            request = firestore.RunQueryRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.run_query]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        # Server-streaming RPC: the returned object is an iterable of
        # RunQueryResponse messages.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    def partition_query(
        self,
        request: firestore.PartitionQueryRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.PartitionQueryPager:
        r"""Partitions a query by returning partition cursors
        that can be used to run the query in parallel. The
        returned partition cursors are split points that can be
        used by RunQuery as starting/end points for the query
        results.

        Args:
            request (:class:`~.firestore.PartitionQueryRequest`):
                The request object. The request for
                [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.pagers.PartitionQueryPager:
                The response for
                [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.
        # No flattened fields are offered for this method.

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.PartitionQueryRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.PartitionQueryRequest):
            request = firestore.PartitionQueryRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.partition_query]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method. The pager re-invokes ``rpc`` for
        # subsequent pages using the same request and metadata.
        response = pagers.PartitionQueryPager(
            method=rpc, request=request, response=response, metadata=metadata,
        )

        # Done; return the response.
        return response
+
    def write(
        self,
        requests: Iterator[firestore.WriteRequest] = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> Iterable[firestore.WriteResponse]:
        r"""Streams batches of document updates and deletes, in
        order.

        Args:
            requests (Iterator[`~.firestore.WriteRequest`]):
                The request object iterator. The request for
                [Firestore.Write][google.firestore.v1.Firestore.Write].
                The first request creates a stream, or resumes an
                existing one from a token.
                When creating a new stream, the server replies with a
                response containing only an ID and a token, to use in
                the next request.

                When resuming a stream, the server first streams any
                responses later than the given token, then a response
                containing only an up-to-date token, to use in the next
                request.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            Iterable[~.firestore.WriteResponse]:
                The response for
                [Firestore.Write][google.firestore.v1.Firestore.Write].

        """
        # Bidirectional-streaming RPC: the caller supplies an iterator of
        # requests and receives an iterable of responses; no request
        # coercion is performed here.

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.write]

        # Certain fields should be provided within the metadata header;
        # add these here.
        # No routing fields exist for a streaming request, hence the empty
        # tuple passed to to_grpc_metadata.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),)

        # Send the request.
        response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    def listen(
        self,
        requests: Iterator[firestore.ListenRequest] = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> Iterable[firestore.ListenResponse]:
        r"""Listens to changes.

        Args:
            requests (Iterator[`~.firestore.ListenRequest`]):
                The request object iterator. A request for
                [Firestore.Listen][google.firestore.v1.Firestore.Listen]
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            Iterable[~.firestore.ListenResponse]:
                The response for
                [Firestore.Listen][google.firestore.v1.Firestore.Listen].

        """
        # Bidirectional-streaming RPC: requests are consumed from the
        # supplied iterator and responses are streamed back.

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.listen]

        # Certain fields should be provided within the metadata header;
        # add these here.
        # No routing fields exist for a streaming request, hence the empty
        # tuple passed to to_grpc_metadata.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),)

        # Send the request.
        response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    def list_collection_ids(
        self,
        request: firestore.ListCollectionIdsRequest = None,
        *,
        parent: str = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListCollectionIdsPager:
        r"""Lists all the collection IDs underneath a document.

        Args:
            request (:class:`~.firestore.ListCollectionIdsRequest`):
                The request object. The request for
                [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].
            parent (:class:`str`):
                Required. The parent document. In the format:
                ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
                For example:
                ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.pagers.ListCollectionIdsPager:
                The response from
                [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.ListCollectionIdsRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.ListCollectionIdsRequest):
            request = firestore.ListCollectionIdsRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list_collection_ids]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method. The pager re-invokes ``rpc`` for
        # subsequent pages using the same request and metadata.
        response = pagers.ListCollectionIdsPager(
            method=rpc, request=request, response=response, metadata=metadata,
        )

        # Done; return the response.
        return response
+
    def batch_write(
        self,
        request: firestore.BatchWriteRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> firestore.BatchWriteResponse:
        r"""Applies a batch of write operations.

        The BatchWrite method does not apply the write operations
        atomically and can apply them out of order. Method does not
        allow more than one write per document. Each write succeeds or
        fails independently. See the
        [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for
        the success status of each write.

        If you require an atomically applied set of writes, use
        [Commit][google.firestore.v1.Firestore.Commit] instead.

        Args:
            request (:class:`~.firestore.BatchWriteRequest`):
                The request object. The request for
                [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.firestore.BatchWriteResponse:
                The response from
                [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].

        """
        # Create or coerce a protobuf request object.
        # No flattened fields are offered for this method.

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.BatchWriteRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.BatchWriteRequest):
            request = firestore.BatchWriteRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.batch_write]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
    def create_document(
        self,
        request: firestore.CreateDocumentRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> document.Document:
        r"""Creates a new document.

        Args:
            request (:class:`~.firestore.CreateDocumentRequest`):
                The request object. The request for
                [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument].

            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.document.Document:
                A Firestore document.
                Must not exceed 1 MiB - 4 bytes.

        """
        # Create or coerce a protobuf request object.
        # No flattened fields are offered for this method.

        # Minor optimization to avoid making a copy if the user passes
        # in a firestore.CreateDocumentRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, firestore.CreateDocumentRequest):
            request = firestore.CreateDocumentRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.create_document]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response
+
+
# Report the installed package's version to the service via client info.
# If the distribution is not installed through setuptools (e.g. a vendored
# copy), fall back to a ClientInfo without a gapic_version.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-firestore",).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


# Public surface of this module.
__all__ = ("FirestoreClient",)
diff --git a/google/cloud/firestore_v1/services/firestore/pagers.py b/google/cloud/firestore_v1/services/firestore/pagers.py
new file mode 100644
index 0000000000..708ec0adef
--- /dev/null
+++ b/google/cloud/firestore_v1/services/firestore/pagers.py
@@ -0,0 +1,406 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+
+from google.cloud.firestore_v1.types import document
+from google.cloud.firestore_v1.types import firestore
+from google.cloud.firestore_v1.types import query
+
+
+class ListDocumentsPager:
+ """A pager for iterating through ``list_documents`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.firestore.ListDocumentsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``documents`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListDocuments`` requests and continue to iterate
+ through the ``documents`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.firestore.ListDocumentsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., firestore.ListDocumentsResponse],
+ request: firestore.ListDocumentsRequest,
+ response: firestore.ListDocumentsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.firestore.ListDocumentsRequest`):
+ The initial request object.
+ response (:class:`~.firestore.ListDocumentsResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = firestore.ListDocumentsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterable[firestore.ListDocumentsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterable[document.Document]:
+ for page in self.pages:
+ yield from page.documents
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListDocumentsAsyncPager:
+ """A pager for iterating through ``list_documents`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.firestore.ListDocumentsResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``documents`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``ListDocuments`` requests and continue to iterate
+ through the ``documents`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.firestore.ListDocumentsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[firestore.ListDocumentsResponse]],
+ request: firestore.ListDocumentsRequest,
+ response: firestore.ListDocumentsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.firestore.ListDocumentsRequest`):
+ The initial request object.
+ response (:class:`~.firestore.ListDocumentsResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = firestore.ListDocumentsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[firestore.ListDocumentsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[document.Document]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.documents:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class PartitionQueryPager:
+ """A pager for iterating through ``partition_query`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.firestore.PartitionQueryResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``partitions`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``PartitionQuery`` requests and continue to iterate
+ through the ``partitions`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.firestore.PartitionQueryResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., firestore.PartitionQueryResponse],
+ request: firestore.PartitionQueryRequest,
+ response: firestore.PartitionQueryResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.firestore.PartitionQueryRequest`):
+ The initial request object.
+ response (:class:`~.firestore.PartitionQueryResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = firestore.PartitionQueryRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterable[firestore.PartitionQueryResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterable[query.Cursor]:
+ for page in self.pages:
+ yield from page.partitions
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class PartitionQueryAsyncPager:
+ """A pager for iterating through ``partition_query`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.firestore.PartitionQueryResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``partitions`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``PartitionQuery`` requests and continue to iterate
+ through the ``partitions`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.firestore.PartitionQueryResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[firestore.PartitionQueryResponse]],
+ request: firestore.PartitionQueryRequest,
+ response: firestore.PartitionQueryResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.firestore.PartitionQueryRequest`):
+ The initial request object.
+ response (:class:`~.firestore.PartitionQueryResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = firestore.PartitionQueryRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[firestore.PartitionQueryResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[query.Cursor]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.partitions:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListCollectionIdsPager:
+ """A pager for iterating through ``list_collection_ids`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.firestore.ListCollectionIdsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``collection_ids`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListCollectionIds`` requests and continue to iterate
+ through the ``collection_ids`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.firestore.ListCollectionIdsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., firestore.ListCollectionIdsResponse],
+ request: firestore.ListCollectionIdsRequest,
+ response: firestore.ListCollectionIdsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.firestore.ListCollectionIdsRequest`):
+ The initial request object.
+ response (:class:`~.firestore.ListCollectionIdsResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = firestore.ListCollectionIdsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterable[firestore.ListCollectionIdsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterable[str]:
+ for page in self.pages:
+ yield from page.collection_ids
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListCollectionIdsAsyncPager:
+ """A pager for iterating through ``list_collection_ids`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.firestore.ListCollectionIdsResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``collection_ids`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``ListCollectionIds`` requests and continue to iterate
+ through the ``collection_ids`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.firestore.ListCollectionIdsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[firestore.ListCollectionIdsResponse]],
+ request: firestore.ListCollectionIdsRequest,
+ response: firestore.ListCollectionIdsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.firestore.ListCollectionIdsRequest`):
+ The initial request object.
+ response (:class:`~.firestore.ListCollectionIdsResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = firestore.ListCollectionIdsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[firestore.ListCollectionIdsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[str]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.collection_ids:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/cloud/firestore_v1/services/firestore/transports/__init__.py b/google/cloud/firestore_v1/services/firestore/transports/__init__.py
new file mode 100644
index 0000000000..ce6aa3a9d1
--- /dev/null
+++ b/google/cloud/firestore_v1/services/firestore/transports/__init__.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import FirestoreTransport
+from .grpc import FirestoreGrpcTransport
+from .grpc_asyncio import FirestoreGrpcAsyncIOTransport
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreTransport]]
+_transport_registry["grpc"] = FirestoreGrpcTransport
+_transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport
+
+
+__all__ = (
+ "FirestoreTransport",
+ "FirestoreGrpcTransport",
+ "FirestoreGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/firestore_v1/services/firestore/transports/base.py b/google/cloud/firestore_v1/services/firestore/transports/base.py
new file mode 100644
index 0000000000..6a0e3a7d36
--- /dev/null
+++ b/google/cloud/firestore_v1/services/firestore/transports/base.py
@@ -0,0 +1,475 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+
+from google.cloud.firestore_v1.types import document
+from google.cloud.firestore_v1.types import document as gf_document
+from google.cloud.firestore_v1.types import firestore
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution("google-cloud-firestore",).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class FirestoreTransport(abc.ABC):
+ """Abstract transport class for Firestore."""
+
+ AUTH_SCOPES = (
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "firestore.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: typing.Optional[str] = None,
+ scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+ quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ **kwargs,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ """
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Lifted into its own function so it can be stubbed out during tests.
+ self._prep_wrapped_messages(client_info)
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.get_document: gapic_v1.method.wrap_method(
+ self.get_document,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.list_documents: gapic_v1.method.wrap_method(
+ self.list_documents,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.update_document: gapic_v1.method.wrap_method(
+ self.update_document,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.delete_document: gapic_v1.method.wrap_method(
+ self.delete_document,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.batch_get_documents: gapic_v1.method.wrap_method(
+ self.batch_get_documents,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=300.0,
+ client_info=client_info,
+ ),
+ self.begin_transaction: gapic_v1.method.wrap_method(
+ self.begin_transaction,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.commit: gapic_v1.method.wrap_method(
+ self.commit,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.rollback: gapic_v1.method.wrap_method(
+ self.rollback,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.run_query: gapic_v1.method.wrap_method(
+ self.run_query,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=300.0,
+ client_info=client_info,
+ ),
+ self.partition_query: gapic_v1.method.wrap_method(
+ self.partition_query,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=300.0,
+ client_info=client_info,
+ ),
+ self.write: gapic_v1.method.wrap_method(
+ self.write, default_timeout=86400.0, client_info=client_info,
+ ),
+ self.listen: gapic_v1.method.wrap_method(
+ self.listen,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=86400.0,
+ client_info=client_info,
+ ),
+ self.list_collection_ids: gapic_v1.method.wrap_method(
+ self.list_collection_ids,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded,
+ exceptions.InternalServerError,
+ exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.batch_write: gapic_v1.method.wrap_method(
+ self.batch_write,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.Aborted, exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.create_document: gapic_v1.method.wrap_method(
+ self.create_document,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ }
+
+ @property
+ def get_document(
+ self,
+ ) -> typing.Callable[
+ [firestore.GetDocumentRequest],
+ typing.Union[document.Document, typing.Awaitable[document.Document]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def list_documents(
+ self,
+ ) -> typing.Callable[
+ [firestore.ListDocumentsRequest],
+ typing.Union[
+ firestore.ListDocumentsResponse,
+ typing.Awaitable[firestore.ListDocumentsResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def update_document(
+ self,
+ ) -> typing.Callable[
+ [firestore.UpdateDocumentRequest],
+ typing.Union[gf_document.Document, typing.Awaitable[gf_document.Document]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def delete_document(
+ self,
+ ) -> typing.Callable[
+ [firestore.DeleteDocumentRequest],
+ typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def batch_get_documents(
+ self,
+ ) -> typing.Callable[
+ [firestore.BatchGetDocumentsRequest],
+ typing.Union[
+ firestore.BatchGetDocumentsResponse,
+ typing.Awaitable[firestore.BatchGetDocumentsResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def begin_transaction(
+ self,
+ ) -> typing.Callable[
+ [firestore.BeginTransactionRequest],
+ typing.Union[
+ firestore.BeginTransactionResponse,
+ typing.Awaitable[firestore.BeginTransactionResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def commit(
+ self,
+ ) -> typing.Callable[
+ [firestore.CommitRequest],
+ typing.Union[
+ firestore.CommitResponse, typing.Awaitable[firestore.CommitResponse]
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def rollback(
+ self,
+ ) -> typing.Callable[
+ [firestore.RollbackRequest],
+ typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def run_query(
+ self,
+ ) -> typing.Callable[
+ [firestore.RunQueryRequest],
+ typing.Union[
+ firestore.RunQueryResponse, typing.Awaitable[firestore.RunQueryResponse]
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def partition_query(
+ self,
+ ) -> typing.Callable[
+ [firestore.PartitionQueryRequest],
+ typing.Union[
+ firestore.PartitionQueryResponse,
+ typing.Awaitable[firestore.PartitionQueryResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def write(
+ self,
+ ) -> typing.Callable[
+ [firestore.WriteRequest],
+ typing.Union[
+ firestore.WriteResponse, typing.Awaitable[firestore.WriteResponse]
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def listen(
+ self,
+ ) -> typing.Callable[
+ [firestore.ListenRequest],
+ typing.Union[
+ firestore.ListenResponse, typing.Awaitable[firestore.ListenResponse]
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def list_collection_ids(
+ self,
+ ) -> typing.Callable[
+ [firestore.ListCollectionIdsRequest],
+ typing.Union[
+ firestore.ListCollectionIdsResponse,
+ typing.Awaitable[firestore.ListCollectionIdsResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def batch_write(
+ self,
+ ) -> typing.Callable[
+ [firestore.BatchWriteRequest],
+ typing.Union[
+ firestore.BatchWriteResponse, typing.Awaitable[firestore.BatchWriteResponse]
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def create_document(
+ self,
+ ) -> typing.Callable[
+ [firestore.CreateDocumentRequest],
+ typing.Union[document.Document, typing.Awaitable[document.Document]],
+ ]:
+ raise NotImplementedError()
+
+
+__all__ = ("FirestoreTransport",)
diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc.py b/google/cloud/firestore_v1/services/firestore/transports/grpc.py
new file mode 100644
index 0000000000..417ae59c81
--- /dev/null
+++ b/google/cloud/firestore_v1/services/firestore/transports/grpc.py
@@ -0,0 +1,649 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+
+from google.cloud.firestore_v1.types import document
+from google.cloud.firestore_v1.types import document as gf_document
+from google.cloud.firestore_v1.types import firestore
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+from .base import FirestoreTransport, DEFAULT_CLIENT_INFO
+
+
+class FirestoreGrpcTransport(FirestoreTransport):
+ """gRPC backend transport for Firestore.
+
+ The Cloud Firestore service.
+ Cloud Firestore is a fast, fully managed, serverless, cloud-
+ native NoSQL document database that simplifies storing, syncing,
+ and querying data for your mobile, web, and IoT apps at global
+ scale. Its client libraries provide live synchronization and
+ offline support, while its security features and integrations
+ with Firebase and Google Cloud Platform (GCP) accelerate
+ building truly serverless apps.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _stubs: Dict[str, Callable]
+
+ def __init__(
+ self,
+ *,
+ host: str = "firestore.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ self._stubs = {} # type: Dict[str, Callable]
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "firestore.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+            address (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def get_document(
+ self,
+ ) -> Callable[[firestore.GetDocumentRequest], document.Document]:
+ r"""Return a callable for the get document method over gRPC.
+
+ Gets a single document.
+
+ Returns:
+ Callable[[~.GetDocumentRequest],
+ ~.Document]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_document" not in self._stubs:
+ self._stubs["get_document"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/GetDocument",
+ request_serializer=firestore.GetDocumentRequest.serialize,
+ response_deserializer=document.Document.deserialize,
+ )
+ return self._stubs["get_document"]
+
+ @property
+ def list_documents(
+ self,
+ ) -> Callable[[firestore.ListDocumentsRequest], firestore.ListDocumentsResponse]:
+ r"""Return a callable for the list documents method over gRPC.
+
+ Lists documents.
+
+ Returns:
+ Callable[[~.ListDocumentsRequest],
+ ~.ListDocumentsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_documents" not in self._stubs:
+ self._stubs["list_documents"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/ListDocuments",
+ request_serializer=firestore.ListDocumentsRequest.serialize,
+ response_deserializer=firestore.ListDocumentsResponse.deserialize,
+ )
+ return self._stubs["list_documents"]
+
+ @property
+ def update_document(
+ self,
+ ) -> Callable[[firestore.UpdateDocumentRequest], gf_document.Document]:
+ r"""Return a callable for the update document method over gRPC.
+
+ Updates or inserts a document.
+
+ Returns:
+ Callable[[~.UpdateDocumentRequest],
+ ~.Document]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_document" not in self._stubs:
+ self._stubs["update_document"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/UpdateDocument",
+ request_serializer=firestore.UpdateDocumentRequest.serialize,
+ response_deserializer=gf_document.Document.deserialize,
+ )
+ return self._stubs["update_document"]
+
+ @property
+ def delete_document(
+ self,
+ ) -> Callable[[firestore.DeleteDocumentRequest], empty.Empty]:
+ r"""Return a callable for the delete document method over gRPC.
+
+ Deletes a document.
+
+ Returns:
+ Callable[[~.DeleteDocumentRequest],
+ ~.Empty]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_document" not in self._stubs:
+ self._stubs["delete_document"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/DeleteDocument",
+ request_serializer=firestore.DeleteDocumentRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["delete_document"]
+
+ @property
+ def batch_get_documents(
+ self,
+ ) -> Callable[
+ [firestore.BatchGetDocumentsRequest], firestore.BatchGetDocumentsResponse
+ ]:
+ r"""Return a callable for the batch get documents method over gRPC.
+
+ Gets multiple documents.
+ Documents returned by this method are not guaranteed to
+ be returned in the same order that they were requested.
+
+ Returns:
+ Callable[[~.BatchGetDocumentsRequest],
+ ~.BatchGetDocumentsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "batch_get_documents" not in self._stubs:
+ self._stubs["batch_get_documents"] = self.grpc_channel.unary_stream(
+ "/google.firestore.v1.Firestore/BatchGetDocuments",
+ request_serializer=firestore.BatchGetDocumentsRequest.serialize,
+ response_deserializer=firestore.BatchGetDocumentsResponse.deserialize,
+ )
+ return self._stubs["batch_get_documents"]
+
+ @property
+ def begin_transaction(
+ self,
+ ) -> Callable[
+ [firestore.BeginTransactionRequest], firestore.BeginTransactionResponse
+ ]:
+ r"""Return a callable for the begin transaction method over gRPC.
+
+ Starts a new transaction.
+
+ Returns:
+ Callable[[~.BeginTransactionRequest],
+ ~.BeginTransactionResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "begin_transaction" not in self._stubs:
+ self._stubs["begin_transaction"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/BeginTransaction",
+ request_serializer=firestore.BeginTransactionRequest.serialize,
+ response_deserializer=firestore.BeginTransactionResponse.deserialize,
+ )
+ return self._stubs["begin_transaction"]
+
+ @property
+ def commit(self) -> Callable[[firestore.CommitRequest], firestore.CommitResponse]:
+ r"""Return a callable for the commit method over gRPC.
+
+ Commits a transaction, while optionally updating
+ documents.
+
+ Returns:
+ Callable[[~.CommitRequest],
+ ~.CommitResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "commit" not in self._stubs:
+ self._stubs["commit"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/Commit",
+ request_serializer=firestore.CommitRequest.serialize,
+ response_deserializer=firestore.CommitResponse.deserialize,
+ )
+ return self._stubs["commit"]
+
+ @property
+ def rollback(self) -> Callable[[firestore.RollbackRequest], empty.Empty]:
+ r"""Return a callable for the rollback method over gRPC.
+
+ Rolls back a transaction.
+
+ Returns:
+ Callable[[~.RollbackRequest],
+ ~.Empty]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "rollback" not in self._stubs:
+ self._stubs["rollback"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/Rollback",
+ request_serializer=firestore.RollbackRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["rollback"]
+
+ @property
+ def run_query(
+ self,
+ ) -> Callable[[firestore.RunQueryRequest], firestore.RunQueryResponse]:
+ r"""Return a callable for the run query method over gRPC.
+
+ Runs a query.
+
+ Returns:
+ Callable[[~.RunQueryRequest],
+ ~.RunQueryResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "run_query" not in self._stubs:
+ self._stubs["run_query"] = self.grpc_channel.unary_stream(
+ "/google.firestore.v1.Firestore/RunQuery",
+ request_serializer=firestore.RunQueryRequest.serialize,
+ response_deserializer=firestore.RunQueryResponse.deserialize,
+ )
+ return self._stubs["run_query"]
+
+ @property
+ def partition_query(
+ self,
+ ) -> Callable[[firestore.PartitionQueryRequest], firestore.PartitionQueryResponse]:
+ r"""Return a callable for the partition query method over gRPC.
+
+ Partitions a query by returning partition cursors
+ that can be used to run the query in parallel. The
+ returned partition cursors are split points that can be
+ used by RunQuery as starting/end points for the query
+ results.
+
+ Returns:
+ Callable[[~.PartitionQueryRequest],
+ ~.PartitionQueryResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "partition_query" not in self._stubs:
+ self._stubs["partition_query"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/PartitionQuery",
+ request_serializer=firestore.PartitionQueryRequest.serialize,
+ response_deserializer=firestore.PartitionQueryResponse.deserialize,
+ )
+ return self._stubs["partition_query"]
+
+ @property
+ def write(self) -> Callable[[firestore.WriteRequest], firestore.WriteResponse]:
+ r"""Return a callable for the write method over gRPC.
+
+ Streams batches of document updates and deletes, in
+ order.
+
+ Returns:
+ Callable[[~.WriteRequest],
+ ~.WriteResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "write" not in self._stubs:
+ self._stubs["write"] = self.grpc_channel.stream_stream(
+ "/google.firestore.v1.Firestore/Write",
+ request_serializer=firestore.WriteRequest.serialize,
+ response_deserializer=firestore.WriteResponse.deserialize,
+ )
+ return self._stubs["write"]
+
+ @property
+ def listen(self) -> Callable[[firestore.ListenRequest], firestore.ListenResponse]:
+ r"""Return a callable for the listen method over gRPC.
+
+ Listens to changes.
+
+ Returns:
+ Callable[[~.ListenRequest],
+ ~.ListenResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "listen" not in self._stubs:
+ self._stubs["listen"] = self.grpc_channel.stream_stream(
+ "/google.firestore.v1.Firestore/Listen",
+ request_serializer=firestore.ListenRequest.serialize,
+ response_deserializer=firestore.ListenResponse.deserialize,
+ )
+ return self._stubs["listen"]
+
+ @property
+ def list_collection_ids(
+ self,
+ ) -> Callable[
+ [firestore.ListCollectionIdsRequest], firestore.ListCollectionIdsResponse
+ ]:
+ r"""Return a callable for the list collection ids method over gRPC.
+
+ Lists all the collection IDs underneath a document.
+
+ Returns:
+ Callable[[~.ListCollectionIdsRequest],
+ ~.ListCollectionIdsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_collection_ids" not in self._stubs:
+ self._stubs["list_collection_ids"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/ListCollectionIds",
+ request_serializer=firestore.ListCollectionIdsRequest.serialize,
+ response_deserializer=firestore.ListCollectionIdsResponse.deserialize,
+ )
+ return self._stubs["list_collection_ids"]
+
+ @property
+ def batch_write(
+ self,
+ ) -> Callable[[firestore.BatchWriteRequest], firestore.BatchWriteResponse]:
+ r"""Return a callable for the batch write method over gRPC.
+
+ Applies a batch of write operations.
+
+ The BatchWrite method does not apply the write operations
+ atomically and can apply them out of order. Method does not
+ allow more than one write per document. Each write succeeds or
+ fails independently. See the
+ [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for
+ the success status of each write.
+
+ If you require an atomically applied set of writes, use
+ [Commit][google.firestore.v1.Firestore.Commit] instead.
+
+ Returns:
+ Callable[[~.BatchWriteRequest],
+ ~.BatchWriteResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "batch_write" not in self._stubs:
+ self._stubs["batch_write"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/BatchWrite",
+ request_serializer=firestore.BatchWriteRequest.serialize,
+ response_deserializer=firestore.BatchWriteResponse.deserialize,
+ )
+ return self._stubs["batch_write"]
+
+ @property
+ def create_document(
+ self,
+ ) -> Callable[[firestore.CreateDocumentRequest], document.Document]:
+ r"""Return a callable for the create document method over gRPC.
+
+ Creates a new document.
+
+ Returns:
+ Callable[[~.CreateDocumentRequest],
+ ~.Document]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_document" not in self._stubs:
+ self._stubs["create_document"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/CreateDocument",
+ request_serializer=firestore.CreateDocumentRequest.serialize,
+ response_deserializer=document.Document.deserialize,
+ )
+ return self._stubs["create_document"]
+
+
+__all__ = ("FirestoreGrpcTransport",)
diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py
new file mode 100644
index 0000000000..9860449499
--- /dev/null
+++ b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py
@@ -0,0 +1,664 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.firestore_v1.types import document
+from google.cloud.firestore_v1.types import document as gf_document
+from google.cloud.firestore_v1.types import firestore
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+from .base import FirestoreTransport, DEFAULT_CLIENT_INFO
+from .grpc import FirestoreGrpcTransport
+
+
+class FirestoreGrpcAsyncIOTransport(FirestoreTransport):
+ """gRPC AsyncIO backend transport for Firestore.
+
+ The Cloud Firestore service.
+ Cloud Firestore is a fast, fully managed, serverless, cloud-
+ native NoSQL document database that simplifies storing, syncing,
+ and querying data for your mobile, web, and IoT apps at global
+ scale. Its client libraries provide live synchronization and
+ offline support, while its security features and integrations
+ with Firebase and Google Cloud Platform (GCP) accelerate
+ building truly serverless apps.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "firestore.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ address (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "firestore.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+              google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ self._stubs = {}
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def get_document(
+ self,
+ ) -> Callable[[firestore.GetDocumentRequest], Awaitable[document.Document]]:
+ r"""Return a callable for the get document method over gRPC.
+
+ Gets a single document.
+
+ Returns:
+ Callable[[~.GetDocumentRequest],
+ Awaitable[~.Document]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_document" not in self._stubs:
+ self._stubs["get_document"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/GetDocument",
+ request_serializer=firestore.GetDocumentRequest.serialize,
+ response_deserializer=document.Document.deserialize,
+ )
+ return self._stubs["get_document"]
+
+ @property
+ def list_documents(
+ self,
+ ) -> Callable[
+ [firestore.ListDocumentsRequest], Awaitable[firestore.ListDocumentsResponse]
+ ]:
+ r"""Return a callable for the list documents method over gRPC.
+
+ Lists documents.
+
+ Returns:
+ Callable[[~.ListDocumentsRequest],
+ Awaitable[~.ListDocumentsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_documents" not in self._stubs:
+ self._stubs["list_documents"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/ListDocuments",
+ request_serializer=firestore.ListDocumentsRequest.serialize,
+ response_deserializer=firestore.ListDocumentsResponse.deserialize,
+ )
+ return self._stubs["list_documents"]
+
+ @property
+ def update_document(
+ self,
+ ) -> Callable[[firestore.UpdateDocumentRequest], Awaitable[gf_document.Document]]:
+ r"""Return a callable for the update document method over gRPC.
+
+ Updates or inserts a document.
+
+ Returns:
+ Callable[[~.UpdateDocumentRequest],
+ Awaitable[~.Document]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_document" not in self._stubs:
+ self._stubs["update_document"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/UpdateDocument",
+ request_serializer=firestore.UpdateDocumentRequest.serialize,
+ response_deserializer=gf_document.Document.deserialize,
+ )
+ return self._stubs["update_document"]
+
+ @property
+ def delete_document(
+ self,
+ ) -> Callable[[firestore.DeleteDocumentRequest], Awaitable[empty.Empty]]:
+ r"""Return a callable for the delete document method over gRPC.
+
+ Deletes a document.
+
+ Returns:
+ Callable[[~.DeleteDocumentRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_document" not in self._stubs:
+ self._stubs["delete_document"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/DeleteDocument",
+ request_serializer=firestore.DeleteDocumentRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["delete_document"]
+
+ @property
+ def batch_get_documents(
+ self,
+ ) -> Callable[
+ [firestore.BatchGetDocumentsRequest],
+ Awaitable[firestore.BatchGetDocumentsResponse],
+ ]:
+ r"""Return a callable for the batch get documents method over gRPC.
+
+ Gets multiple documents.
+ Documents returned by this method are not guaranteed to
+ be returned in the same order that they were requested.
+
+ Returns:
+ Callable[[~.BatchGetDocumentsRequest],
+ Awaitable[~.BatchGetDocumentsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "batch_get_documents" not in self._stubs:
+ self._stubs["batch_get_documents"] = self.grpc_channel.unary_stream(
+ "/google.firestore.v1.Firestore/BatchGetDocuments",
+ request_serializer=firestore.BatchGetDocumentsRequest.serialize,
+ response_deserializer=firestore.BatchGetDocumentsResponse.deserialize,
+ )
+ return self._stubs["batch_get_documents"]
+
+ @property
+ def begin_transaction(
+ self,
+ ) -> Callable[
+ [firestore.BeginTransactionRequest],
+ Awaitable[firestore.BeginTransactionResponse],
+ ]:
+ r"""Return a callable for the begin transaction method over gRPC.
+
+ Starts a new transaction.
+
+ Returns:
+ Callable[[~.BeginTransactionRequest],
+ Awaitable[~.BeginTransactionResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "begin_transaction" not in self._stubs:
+ self._stubs["begin_transaction"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/BeginTransaction",
+ request_serializer=firestore.BeginTransactionRequest.serialize,
+ response_deserializer=firestore.BeginTransactionResponse.deserialize,
+ )
+ return self._stubs["begin_transaction"]
+
+ @property
+ def commit(
+ self,
+ ) -> Callable[[firestore.CommitRequest], Awaitable[firestore.CommitResponse]]:
+ r"""Return a callable for the commit method over gRPC.
+
+ Commits a transaction, while optionally updating
+ documents.
+
+ Returns:
+ Callable[[~.CommitRequest],
+ Awaitable[~.CommitResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "commit" not in self._stubs:
+ self._stubs["commit"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/Commit",
+ request_serializer=firestore.CommitRequest.serialize,
+ response_deserializer=firestore.CommitResponse.deserialize,
+ )
+ return self._stubs["commit"]
+
+ @property
+ def rollback(self) -> Callable[[firestore.RollbackRequest], Awaitable[empty.Empty]]:
+ r"""Return a callable for the rollback method over gRPC.
+
+ Rolls back a transaction.
+
+ Returns:
+ Callable[[~.RollbackRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "rollback" not in self._stubs:
+ self._stubs["rollback"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/Rollback",
+ request_serializer=firestore.RollbackRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["rollback"]
+
+ @property
+ def run_query(
+ self,
+ ) -> Callable[[firestore.RunQueryRequest], Awaitable[firestore.RunQueryResponse]]:
+ r"""Return a callable for the run query method over gRPC.
+
+ Runs a query.
+
+ Returns:
+ Callable[[~.RunQueryRequest],
+ Awaitable[~.RunQueryResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "run_query" not in self._stubs:
+ self._stubs["run_query"] = self.grpc_channel.unary_stream(
+ "/google.firestore.v1.Firestore/RunQuery",
+ request_serializer=firestore.RunQueryRequest.serialize,
+ response_deserializer=firestore.RunQueryResponse.deserialize,
+ )
+ return self._stubs["run_query"]
+
+ @property
+ def partition_query(
+ self,
+ ) -> Callable[
+ [firestore.PartitionQueryRequest], Awaitable[firestore.PartitionQueryResponse]
+ ]:
+ r"""Return a callable for the partition query method over gRPC.
+
+ Partitions a query by returning partition cursors
+ that can be used to run the query in parallel. The
+ returned partition cursors are split points that can be
+ used by RunQuery as starting/end points for the query
+ results.
+
+ Returns:
+ Callable[[~.PartitionQueryRequest],
+ Awaitable[~.PartitionQueryResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "partition_query" not in self._stubs:
+ self._stubs["partition_query"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/PartitionQuery",
+ request_serializer=firestore.PartitionQueryRequest.serialize,
+ response_deserializer=firestore.PartitionQueryResponse.deserialize,
+ )
+ return self._stubs["partition_query"]
+
+ @property
+ def write(
+ self,
+ ) -> Callable[[firestore.WriteRequest], Awaitable[firestore.WriteResponse]]:
+ r"""Return a callable for the write method over gRPC.
+
+ Streams batches of document updates and deletes, in
+ order.
+
+ Returns:
+ Callable[[~.WriteRequest],
+ Awaitable[~.WriteResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "write" not in self._stubs:
+ self._stubs["write"] = self.grpc_channel.stream_stream(
+ "/google.firestore.v1.Firestore/Write",
+ request_serializer=firestore.WriteRequest.serialize,
+ response_deserializer=firestore.WriteResponse.deserialize,
+ )
+ return self._stubs["write"]
+
+ @property
+ def listen(
+ self,
+ ) -> Callable[[firestore.ListenRequest], Awaitable[firestore.ListenResponse]]:
+ r"""Return a callable for the listen method over gRPC.
+
+ Listens to changes.
+
+ Returns:
+ Callable[[~.ListenRequest],
+ Awaitable[~.ListenResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "listen" not in self._stubs:
+ self._stubs["listen"] = self.grpc_channel.stream_stream(
+ "/google.firestore.v1.Firestore/Listen",
+ request_serializer=firestore.ListenRequest.serialize,
+ response_deserializer=firestore.ListenResponse.deserialize,
+ )
+ return self._stubs["listen"]
+
+ @property
+ def list_collection_ids(
+ self,
+ ) -> Callable[
+ [firestore.ListCollectionIdsRequest],
+ Awaitable[firestore.ListCollectionIdsResponse],
+ ]:
+ r"""Return a callable for the list collection ids method over gRPC.
+
+ Lists all the collection IDs underneath a document.
+
+ Returns:
+ Callable[[~.ListCollectionIdsRequest],
+ Awaitable[~.ListCollectionIdsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_collection_ids" not in self._stubs:
+ self._stubs["list_collection_ids"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/ListCollectionIds",
+ request_serializer=firestore.ListCollectionIdsRequest.serialize,
+ response_deserializer=firestore.ListCollectionIdsResponse.deserialize,
+ )
+ return self._stubs["list_collection_ids"]
+
+ @property
+ def batch_write(
+ self,
+ ) -> Callable[
+ [firestore.BatchWriteRequest], Awaitable[firestore.BatchWriteResponse]
+ ]:
+ r"""Return a callable for the batch write method over gRPC.
+
+ Applies a batch of write operations.
+
+ The BatchWrite method does not apply the write operations
+ atomically and can apply them out of order. Method does not
+ allow more than one write per document. Each write succeeds or
+ fails independently. See the
+ [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for
+ the success status of each write.
+
+ If you require an atomically applied set of writes, use
+ [Commit][google.firestore.v1.Firestore.Commit] instead.
+
+ Returns:
+ Callable[[~.BatchWriteRequest],
+ Awaitable[~.BatchWriteResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "batch_write" not in self._stubs:
+ self._stubs["batch_write"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/BatchWrite",
+ request_serializer=firestore.BatchWriteRequest.serialize,
+ response_deserializer=firestore.BatchWriteResponse.deserialize,
+ )
+ return self._stubs["batch_write"]
+
+ @property
+ def create_document(
+ self,
+ ) -> Callable[[firestore.CreateDocumentRequest], Awaitable[document.Document]]:
+ r"""Return a callable for the create document method over gRPC.
+
+ Creates a new document.
+
+ Returns:
+ Callable[[~.CreateDocumentRequest],
+ Awaitable[~.Document]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_document" not in self._stubs:
+ self._stubs["create_document"] = self.grpc_channel.unary_unary(
+ "/google.firestore.v1.Firestore/CreateDocument",
+ request_serializer=firestore.CreateDocumentRequest.serialize,
+ response_deserializer=document.Document.deserialize,
+ )
+ return self._stubs["create_document"]
+
+
+__all__ = ("FirestoreGrpcAsyncIOTransport",)
diff --git a/google/cloud/firestore_v1/transaction.py b/google/cloud/firestore_v1/transaction.py
index 04485a84c2..f4719f7126 100644
--- a/google/cloud/firestore_v1/transaction.py
+++ b/google/cloud/firestore_v1/transaction.py
@@ -18,33 +18,36 @@
import random
import time
-import six
-
-from google.api_core import exceptions
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+
+from google.cloud.firestore_v1.base_transaction import (
+ _BaseTransactional,
+ BaseTransaction,
+ MAX_ATTEMPTS,
+ _CANT_BEGIN,
+ _CANT_ROLLBACK,
+ _CANT_COMMIT,
+ _WRITE_READ_ONLY,
+ _INITIAL_SLEEP,
+ _MAX_SLEEP,
+ _MULTIPLIER,
+ _EXCEED_ATTEMPTS_TEMPLATE,
+)
+
+from google.api_core import exceptions # type: ignore
from google.cloud.firestore_v1 import batch
-from google.cloud.firestore_v1 import types
from google.cloud.firestore_v1.document import DocumentReference
+from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1.query import Query
+# Types needed only for type hints.
+from google.cloud.firestore_v1.base_document import DocumentSnapshot
+from google.cloud.firestore_v1.types import CommitResponse
+from typing import Any, Callable, Generator, Optional
+
-MAX_ATTEMPTS = 5
-"""int: Default number of transaction attempts (with retries)."""
-_CANT_BEGIN = "The transaction has already begun. Current transaction ID: {!r}."
-_MISSING_ID_TEMPLATE = "The transaction has no transaction ID, so it cannot be {}."
-_CANT_ROLLBACK = _MISSING_ID_TEMPLATE.format("rolled back")
-_CANT_COMMIT = _MISSING_ID_TEMPLATE.format("committed")
-_WRITE_READ_ONLY = "Cannot perform write operation in read-only transaction."
-_INITIAL_SLEEP = 1.0
-"""float: Initial "max" for sleep interval. To be used in :func:`_sleep`."""
-_MAX_SLEEP = 30.0
-"""float: Eventual "max" sleep time. To be used in :func:`_sleep`."""
-_MULTIPLIER = 2.0
-"""float: Multiplier for exponential backoff. To be used in :func:`_sleep`."""
-_EXCEED_ATTEMPTS_TEMPLATE = "Failed to commit transaction in {:d} attempts."
-_CANT_RETRY_READ_ONLY = "Only read-write transactions can be retried."
-
-
-class Transaction(batch.WriteBatch):
+class Transaction(batch.WriteBatch, BaseTransaction):
"""Accumulate read-and-write operations to be sent in a transaction.
Args:
@@ -58,18 +61,16 @@ class Transaction(batch.WriteBatch):
:data:`False`.
"""
- def __init__(self, client, max_attempts=MAX_ATTEMPTS, read_only=False):
+ def __init__(self, client, max_attempts=MAX_ATTEMPTS, read_only=False) -> None:
super(Transaction, self).__init__(client)
- self._max_attempts = max_attempts
- self._read_only = read_only
- self._id = None
+ BaseTransaction.__init__(self, max_attempts, read_only)
- def _add_write_pbs(self, write_pbs):
+ def _add_write_pbs(self, write_pbs: list) -> None:
"""Add `Write`` protobufs to this transaction.
Args:
write_pbs (List[google.cloud.proto.firestore.v1.\
- write_pb2.Write]): A list of write protobufs to be added.
+ write.Write]): A list of write protobufs to be added.
Raises:
ValueError: If this transaction is read-only.
@@ -79,62 +80,7 @@ def _add_write_pbs(self, write_pbs):
super(Transaction, self)._add_write_pbs(write_pbs)
- def _options_protobuf(self, retry_id):
- """Convert the current object to protobuf.
-
- The ``retry_id`` value is used when retrying a transaction that
- failed (e.g. due to contention). It is intended to be the "first"
- transaction that failed (i.e. if multiple retries are needed).
-
- Args:
- retry_id (Union[bytes, NoneType]): Transaction ID of a transaction
- to be retried.
-
- Returns:
- Optional[google.cloud.firestore_v1.types.TransactionOptions]:
- The protobuf ``TransactionOptions`` if ``read_only==True`` or if
- there is a transaction ID to be retried, else :data:`None`.
-
- Raises:
- ValueError: If ``retry_id`` is not :data:`None` but the
- transaction is read-only.
- """
- if retry_id is not None:
- if self._read_only:
- raise ValueError(_CANT_RETRY_READ_ONLY)
-
- return types.TransactionOptions(
- read_write=types.TransactionOptions.ReadWrite(
- retry_transaction=retry_id
- )
- )
- elif self._read_only:
- return types.TransactionOptions(
- read_only=types.TransactionOptions.ReadOnly()
- )
- else:
- return None
-
- @property
- def in_progress(self):
- """Determine if this transaction has already begun.
-
- Returns:
- bool: Indicates if the transaction has started.
- """
- return self._id is not None
-
- @property
- def id(self):
- """Get the current transaction ID.
-
- Returns:
- Optional[bytes]: The transaction ID (or :data:`None` if the
- current transaction is not in progress).
- """
- return self._id
-
- def _begin(self, retry_id=None):
+ def _begin(self, retry_id: bytes = None) -> None:
"""Begin the transaction.
Args:
@@ -149,21 +95,15 @@ def _begin(self, retry_id=None):
raise ValueError(msg)
transaction_response = self._client._firestore_api.begin_transaction(
- self._client._database_string,
- options_=self._options_protobuf(retry_id),
+ request={
+ "database": self._client._database_string,
+ "options": self._options_protobuf(retry_id),
+ },
metadata=self._client._rpc_metadata,
)
self._id = transaction_response.transaction
- def _clean_up(self):
- """Clean up the instance after :meth:`_rollback`` or :meth:`_commit``.
-
- This intended to occur on success or failure of the associated RPCs.
- """
- self._write_pbs = []
- self._id = None
-
- def _rollback(self):
+ def _rollback(self) -> None:
"""Roll back the transaction.
Raises:
@@ -175,18 +115,20 @@ def _rollback(self):
try:
# NOTE: The response is just ``google.protobuf.Empty``.
self._client._firestore_api.rollback(
- self._client._database_string,
- self._id,
+ request={
+ "database": self._client._database_string,
+ "transaction": self._id,
+ },
metadata=self._client._rpc_metadata,
)
finally:
self._clean_up()
- def _commit(self):
+ def _commit(self) -> list:
"""Transactionally commit the changes accumulated.
Returns:
- List[:class:`google.cloud.proto.firestore.v1.write_pb2.WriteResult`, ...]:
+ List[:class:`google.cloud.proto.firestore.v1.write.WriteResult`, ...]:
The write results corresponding to the changes committed, returned
in the same order as the changes were applied to this transaction.
A write result contains an ``update_time`` field.
@@ -202,39 +144,60 @@ def _commit(self):
self._clean_up()
return list(commit_response.write_results)
- def get_all(self, references):
+ def get_all(
+ self,
+ references: list,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Generator[DocumentSnapshot, Any, None]:
"""Retrieves multiple documents from Firestore.
Args:
references (List[.DocumentReference, ...]): Iterable of document
references to be retrieved.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
Yields:
.DocumentSnapshot: The next document snapshot that fulfills the
query, or :data:`None` if the document does not exist.
"""
- return self._client.get_all(references, transaction=self)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+ return self._client.get_all(references, transaction=self, **kwargs)
+
+ def get(
+ self,
+ ref_or_query,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ ) -> Generator[DocumentSnapshot, Any, None]:
+ """Retrieve a document or a query result from the database.
- def get(self, ref_or_query):
- """
- Retrieve a document or a query result from the database.
Args:
- ref_or_query The document references or query object to return.
+ ref_or_query: The document reference or query object to return.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried. Defaults to a system-specified policy.
+ timeout (float): The timeout for this request. Defaults to a
+ system-specified value.
+
Yields:
.DocumentSnapshot: The next document snapshot that fulfills the
query, or :data:`None` if the document does not exist.
"""
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
if isinstance(ref_or_query, DocumentReference):
- return self._client.get_all([ref_or_query], transaction=self)
+ return self._client.get_all([ref_or_query], transaction=self, **kwargs)
elif isinstance(ref_or_query, Query):
- return ref_or_query.stream(transaction=self)
+ return ref_or_query.stream(transaction=self, **kwargs)
else:
raise ValueError(
'Value for argument "ref_or_query" must be a DocumentReference or a Query.'
)
-class _Transactional(object):
+class _Transactional(_BaseTransactional):
"""Provide a callable object to use as a transactional decorater.
This is surfaced via
@@ -245,19 +208,10 @@ class _Transactional(object):
A callable that should be run (and retried) in a transaction.
"""
- def __init__(self, to_wrap):
- self.to_wrap = to_wrap
- self.current_id = None
- """Optional[bytes]: The current transaction ID."""
- self.retry_id = None
- """Optional[bytes]: The ID of the first attempted transaction."""
-
- def _reset(self):
- """Unset the transaction IDs."""
- self.current_id = None
- self.retry_id = None
+ def __init__(self, to_wrap) -> None:
+ super(_Transactional, self).__init__(to_wrap)
- def _pre_commit(self, transaction, *args, **kwargs):
+ def _pre_commit(self, transaction: Transaction, *args, **kwargs) -> Any:
"""Begin transaction and call the wrapped callable.
If the callable raises an exception, the transaction will be rolled
@@ -295,7 +249,7 @@ def _pre_commit(self, transaction, *args, **kwargs):
transaction._rollback()
raise
- def _maybe_commit(self, transaction):
+ def _maybe_commit(self, transaction: Transaction) -> Optional[bool]:
"""Try to commit the transaction.
If the transaction is read-write and the ``Commit`` fails with the
@@ -323,7 +277,7 @@ def _maybe_commit(self, transaction):
else:
raise
- def __call__(self, transaction, *args, **kwargs):
+ def __call__(self, transaction: Transaction, *args, **kwargs):
"""Execute the wrapped callable within a transaction.
Args:
@@ -344,7 +298,7 @@ def __call__(self, transaction, *args, **kwargs):
"""
self._reset()
- for attempt in six.moves.xrange(transaction._max_attempts):
+ for attempt in range(transaction._max_attempts):
result = self._pre_commit(transaction, *args, **kwargs)
succeeded = self._maybe_commit(transaction)
if succeeded:
@@ -361,7 +315,7 @@ def __call__(self, transaction, *args, **kwargs):
raise ValueError(msg)
-def transactional(to_wrap):
+def transactional(to_wrap: Callable) -> _Transactional:
"""Decorate a callable so that it runs in a transaction.
Args:
@@ -376,7 +330,9 @@ def transactional(to_wrap):
return _Transactional(to_wrap)
-def _commit_with_retry(client, write_pbs, transaction_id):
+def _commit_with_retry(
+ client, write_pbs: list, transaction_id: bytes
+) -> CommitResponse:
"""Call ``Commit`` on the GAPIC client with retry / sleep.
Retries the ``Commit`` RPC on Unavailable. Usually this RPC-level
@@ -388,7 +344,7 @@ def _commit_with_retry(client, write_pbs, transaction_id):
Args:
client (:class:`~google.cloud.firestore_v1.client.Client`):
A client with GAPIC client and configuration details.
- write_pbs (List[:class:`google.cloud.proto.firestore.v1.write_pb2.Write`, ...]):
+ write_pbs (List[:class:`google.cloud.proto.firestore.v1.write.Write`, ...]):
A ``Write`` protobuf instance to be committed.
transaction_id (bytes):
ID of an existing transaction that this commit will run in.
@@ -405,9 +361,11 @@ def _commit_with_retry(client, write_pbs, transaction_id):
while True:
try:
return client._firestore_api.commit(
- client._database_string,
- write_pbs,
- transaction=transaction_id,
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": transaction_id,
+ },
metadata=client._rpc_metadata,
)
except exceptions.ServiceUnavailable:
@@ -417,7 +375,9 @@ def _commit_with_retry(client, write_pbs, transaction_id):
current_sleep = _sleep(current_sleep)
-def _sleep(current_sleep, max_sleep=_MAX_SLEEP, multiplier=_MULTIPLIER):
+def _sleep(
+ current_sleep: float, max_sleep: float = _MAX_SLEEP, multiplier: float = _MULTIPLIER
+) -> float:
"""Sleep and produce a new sleep time.
.. _Exponential Backoff And Jitter: https://www.awsarchitectureblog.com/\
diff --git a/google/cloud/firestore_v1/transforms.py b/google/cloud/firestore_v1/transforms.py
index 83b644608d..e9aa876063 100644
--- a/google/cloud/firestore_v1/transforms.py
+++ b/google/cloud/firestore_v1/transforms.py
@@ -20,7 +20,7 @@ class Sentinel(object):
__slots__ = ("description",)
- def __init__(self, description):
+ def __init__(self, description) -> None:
self.description = description
def __repr__(self):
@@ -44,7 +44,7 @@ class _ValueList(object):
slots = ("_values",)
- def __init__(self, values):
+ def __init__(self, values) -> None:
if not isinstance(values, (list, tuple)):
raise ValueError("'values' must be a list or tuple.")
@@ -72,7 +72,7 @@ class ArrayUnion(_ValueList):
"""Field transform: appends missing values to an array field.
See:
- https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.append_missing_elements
+ https://cloud.google.com/firestore/docs/reference/rpc/google.cloud.firestore.v1#google.cloud.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.cloud.firestore.v1.ArrayValue.google.cloud.firestore.v1.DocumentTransform.FieldTransform.append_missing_elements
Args:
values (List | Tuple): values to append.
@@ -83,7 +83,7 @@ class ArrayRemove(_ValueList):
"""Field transform: remove values from an array field.
See:
- https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.remove_all_from_array
+ https://cloud.google.com/firestore/docs/reference/rpc/google.cloud.firestore.v1#google.cloud.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.cloud.firestore.v1.ArrayValue.google.cloud.firestore.v1.DocumentTransform.FieldTransform.remove_all_from_array
Args:
values (List | Tuple): values to remove.
@@ -97,7 +97,7 @@ class _NumericValue(object):
value (int | float): value held in the helper.
"""
- def __init__(self, value):
+ def __init__(self, value) -> None:
if not isinstance(value, (int, float)):
raise ValueError("Pass an integer / float value.")
@@ -122,7 +122,7 @@ class Increment(_NumericValue):
"""Field transform: increment a numeric field with specified value.
See:
- https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.increment
+ https://cloud.google.com/firestore/docs/reference/rpc/google.cloud.firestore.v1#google.cloud.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.cloud.firestore.v1.ArrayValue.google.cloud.firestore.v1.DocumentTransform.FieldTransform.increment
Args:
value (int | float): value used to increment the field.
@@ -133,7 +133,7 @@ class Maximum(_NumericValue):
"""Field transform: bound numeric field with specified value.
See:
- https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.maximum
+ https://cloud.google.com/firestore/docs/reference/rpc/google.cloud.firestore.v1#google.cloud.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.cloud.firestore.v1.ArrayValue.google.cloud.firestore.v1.DocumentTransform.FieldTransform.maximum
Args:
value (int | float): value used to bound the field.
@@ -144,7 +144,7 @@ class Minimum(_NumericValue):
"""Field transform: bound numeric field with specified value.
See:
- https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.minimum
+ https://cloud.google.com/firestore/docs/reference/rpc/google.cloud.firestore.v1#google.cloud.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.cloud.firestore.v1.ArrayValue.google.cloud.firestore.v1.DocumentTransform.FieldTransform.minimum
Args:
value (int | float): value used to bound the field.
diff --git a/google/cloud/firestore_v1/types.py b/google/cloud/firestore_v1/types.py
deleted file mode 100644
index c4e7c35078..0000000000
--- a/google/cloud/firestore_v1/types.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-import sys
-
-from google.api import http_pb2
-from google.protobuf import any_pb2
-from google.protobuf import descriptor_pb2
-from google.protobuf import empty_pb2
-from google.protobuf import struct_pb2
-from google.protobuf import timestamp_pb2
-from google.protobuf import wrappers_pb2
-from google.rpc import status_pb2
-from google.type import latlng_pb2
-
-from google.api_core.protobuf_helpers import get_messages
-from google.cloud.firestore_v1.proto import common_pb2
-from google.cloud.firestore_v1.proto import document_pb2
-from google.cloud.firestore_v1.proto import firestore_pb2
-from google.cloud.firestore_v1.proto import query_pb2
-from google.cloud.firestore_v1.proto import write_pb2
-
-
-_shared_modules = [
- http_pb2,
- any_pb2,
- descriptor_pb2,
- empty_pb2,
- struct_pb2,
- timestamp_pb2,
- wrappers_pb2,
- status_pb2,
- latlng_pb2,
-]
-
-_local_modules = [common_pb2, document_pb2, firestore_pb2, query_pb2, write_pb2]
-
-names = []
-
-for module in _shared_modules:
- for name, message in get_messages(module).items():
- setattr(sys.modules[__name__], name, message)
- names.append(name)
-
-for module in _local_modules:
- for name, message in get_messages(module).items():
- message.__module__ = "google.cloud.firestore_v1.types"
- setattr(sys.modules[__name__], name, message)
- names.append(name)
-
-__all__ = tuple(sorted(names))
diff --git a/google/cloud/firestore_v1/types/__init__.py b/google/cloud/firestore_v1/types/__init__.py
new file mode 100644
index 0000000000..50f61964c8
--- /dev/null
+++ b/google/cloud/firestore_v1/types/__init__.py
@@ -0,0 +1,117 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .common import (
+ DocumentMask,
+ Precondition,
+ TransactionOptions,
+)
+from .document import (
+ Document,
+ Value,
+ ArrayValue,
+ MapValue,
+)
+from .query import (
+ StructuredQuery,
+ Cursor,
+)
+from .write import (
+ Write,
+ DocumentTransform,
+ WriteResult,
+ DocumentChange,
+ DocumentDelete,
+ DocumentRemove,
+ ExistenceFilter,
+)
+from .firestore import (
+ GetDocumentRequest,
+ ListDocumentsRequest,
+ ListDocumentsResponse,
+ CreateDocumentRequest,
+ UpdateDocumentRequest,
+ DeleteDocumentRequest,
+ BatchGetDocumentsRequest,
+ BatchGetDocumentsResponse,
+ BeginTransactionRequest,
+ BeginTransactionResponse,
+ CommitRequest,
+ CommitResponse,
+ RollbackRequest,
+ RunQueryRequest,
+ RunQueryResponse,
+ PartitionQueryRequest,
+ PartitionQueryResponse,
+ WriteRequest,
+ WriteResponse,
+ ListenRequest,
+ ListenResponse,
+ Target,
+ TargetChange,
+ ListCollectionIdsRequest,
+ ListCollectionIdsResponse,
+ BatchWriteRequest,
+ BatchWriteResponse,
+)
+
+
+__all__ = (
+ "DocumentMask",
+ "Precondition",
+ "TransactionOptions",
+ "Document",
+ "Value",
+ "ArrayValue",
+ "MapValue",
+ "StructuredQuery",
+ "Cursor",
+ "Write",
+ "DocumentTransform",
+ "WriteResult",
+ "DocumentChange",
+ "DocumentDelete",
+ "DocumentRemove",
+ "ExistenceFilter",
+ "GetDocumentRequest",
+ "ListDocumentsRequest",
+ "ListDocumentsResponse",
+ "CreateDocumentRequest",
+ "UpdateDocumentRequest",
+ "DeleteDocumentRequest",
+ "BatchGetDocumentsRequest",
+ "BatchGetDocumentsResponse",
+ "BeginTransactionRequest",
+ "BeginTransactionResponse",
+ "CommitRequest",
+ "CommitResponse",
+ "RollbackRequest",
+ "RunQueryRequest",
+ "RunQueryResponse",
+ "PartitionQueryRequest",
+ "PartitionQueryResponse",
+ "WriteRequest",
+ "WriteResponse",
+ "ListenRequest",
+ "ListenResponse",
+ "Target",
+ "TargetChange",
+ "ListCollectionIdsRequest",
+ "ListCollectionIdsResponse",
+ "BatchWriteRequest",
+ "BatchWriteResponse",
+)
diff --git a/google/cloud/firestore_v1/types/common.py b/google/cloud/firestore_v1/types/common.py
new file mode 100644
index 0000000000..b03242a4a8
--- /dev/null
+++ b/google/cloud/firestore_v1/types/common.py
@@ -0,0 +1,112 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.firestore.v1",
+ manifest={"DocumentMask", "Precondition", "TransactionOptions",},
+)
+
+
+class DocumentMask(proto.Message):
+ r"""A set of field paths on a document. Used to restrict a get or update
+ operation on a document to a subset of its fields. This is different
+ from standard field masks, as this is always scoped to a
+ [Document][google.firestore.v1.Document], and takes in account the
+ dynamic nature of [Value][google.firestore.v1.Value].
+
+ Attributes:
+ field_paths (Sequence[str]):
+ The list of field paths in the mask. See
+ [Document.fields][google.firestore.v1.Document.fields] for a
+ field path syntax reference.
+ """
+
+ field_paths = proto.RepeatedField(proto.STRING, number=1)
+
+
+class Precondition(proto.Message):
+ r"""A precondition on a document, used for conditional
+ operations.
+
+ Attributes:
+ exists (bool):
+ When set to ``true``, the target document must exist. When
+ set to ``false``, the target document must not exist.
+ update_time (~.timestamp.Timestamp):
+ When set, the target document must exist and
+ have been last updated at that time.
+ """
+
+ exists = proto.Field(proto.BOOL, number=1, oneof="condition_type")
+
+ update_time = proto.Field(
+ proto.MESSAGE, number=2, oneof="condition_type", message=timestamp.Timestamp,
+ )
+
+
+class TransactionOptions(proto.Message):
+ r"""Options for creating a new transaction.
+
+ Attributes:
+ read_only (~.common.TransactionOptions.ReadOnly):
+ The transaction can only be used for read
+ operations.
+ read_write (~.common.TransactionOptions.ReadWrite):
+ The transaction can be used for both read and
+ write operations.
+ """
+
+ class ReadWrite(proto.Message):
+ r"""Options for a transaction that can be used to read and write
+ documents.
+
+ Attributes:
+ retry_transaction (bytes):
+ An optional transaction to retry.
+ """
+
+ retry_transaction = proto.Field(proto.BYTES, number=1)
+
+ class ReadOnly(proto.Message):
+ r"""Options for a transaction that can only be used to read
+ documents.
+
+ Attributes:
+ read_time (~.timestamp.Timestamp):
+ Reads documents at the given time.
+ This may not be older than 60 seconds.
+ """
+
+ read_time = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ oneof="consistency_selector",
+ message=timestamp.Timestamp,
+ )
+
+ read_only = proto.Field(proto.MESSAGE, number=2, oneof="mode", message=ReadOnly,)
+
+ read_write = proto.Field(proto.MESSAGE, number=3, oneof="mode", message=ReadWrite,)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_v1/types/document.py b/google/cloud/firestore_v1/types/document.py
new file mode 100644
index 0000000000..7104bfc61a
--- /dev/null
+++ b/google/cloud/firestore_v1/types/document.py
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.protobuf import struct_pb2 as struct # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+from google.type import latlng_pb2 as latlng # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.firestore.v1",
+ manifest={"Document", "Value", "ArrayValue", "MapValue",},
+)
+
+
+class Document(proto.Message):
+ r"""A Firestore document.
+ Must not exceed 1 MiB - 4 bytes.
+
+ Attributes:
+ name (str):
+ The resource name of the document, for example
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ fields (Sequence[~.document.Document.FieldsEntry]):
+ The document's fields.
+
+ The map keys represent field names.
+
+ A simple field name contains only characters ``a`` to ``z``,
+ ``A`` to ``Z``, ``0`` to ``9``, or ``_``, and must not start
+ with ``0`` to ``9``. For example, ``foo_bar_17``.
+
+ Field names matching the regular expression ``__.*__`` are
+ reserved. Reserved field names are forbidden except in
+ certain documented contexts. The map keys, represented as
+ UTF-8, must not exceed 1,500 bytes and cannot be empty.
+
+ Field paths may be used in other contexts to refer to
+ structured fields defined here. For ``map_value``, the field
+ path is represented by the simple or quoted field names of
+ the containing fields, delimited by ``.``. For example, the
+ structured field
+ ``"foo" : { map_value: { "x&y" : { string_value: "hello" }}}``
+ would be represented by the field path ``foo.x&y``.
+
+ Within a field path, a quoted field name starts and ends
+ with :literal:`\`` and may contain any character. Some
+ characters, including :literal:`\``, must be escaped using a
+ ``\``. For example, :literal:`\`x&y\`` represents ``x&y``
+ and :literal:`\`bak\`tik\`` represents :literal:`bak`tik`.
+ create_time (~.timestamp.Timestamp):
+ Output only. The time at which the document was created.
+
+ This value increases monotonically when a document is
+ deleted then recreated. It can also be compared to values
+ from other documents and the ``read_time`` of a query.
+ update_time (~.timestamp.Timestamp):
+ Output only. The time at which the document was last
+ changed.
+
+ This value is initially set to the ``create_time`` then
+ increases monotonically with each change to the document. It
+ can also be compared to values from other documents and the
+ ``read_time`` of a query.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ fields = proto.MapField(proto.STRING, proto.MESSAGE, number=2, message="Value",)
+
+ create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)
+
+ update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
+
+
+class Value(proto.Message):
+ r"""A message that can hold any of the supported value types.
+
+ Attributes:
+ null_value (~.struct.NullValue):
+ A null value.
+ boolean_value (bool):
+ A boolean value.
+ integer_value (int):
+ An integer value.
+ double_value (float):
+ A double value.
+ timestamp_value (~.timestamp.Timestamp):
+ A timestamp value.
+ Precise only to microseconds. When stored, any
+ additional precision is rounded down.
+ string_value (str):
+ A string value.
+ The string, represented as UTF-8, must not
+ exceed 1 MiB - 89 bytes. Only the first 1,500
+ bytes of the UTF-8 representation are considered
+ by queries.
+ bytes_value (bytes):
+ A bytes value.
+ Must not exceed 1 MiB - 89 bytes.
+ Only the first 1,500 bytes are considered by
+ queries.
+ reference_value (str):
+ A reference to a document. For example:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ geo_point_value (~.latlng.LatLng):
+ A geo point value representing a point on the
+ surface of Earth.
+ array_value (~.document.ArrayValue):
+ An array value.
+ Cannot directly contain another array value,
+            though can contain a map which contains another
+ array.
+ map_value (~.document.MapValue):
+ A map value.
+ """
+
+ null_value = proto.Field(
+ proto.ENUM, number=11, oneof="value_type", enum=struct.NullValue,
+ )
+
+ boolean_value = proto.Field(proto.BOOL, number=1, oneof="value_type")
+
+ integer_value = proto.Field(proto.INT64, number=2, oneof="value_type")
+
+ double_value = proto.Field(proto.DOUBLE, number=3, oneof="value_type")
+
+ timestamp_value = proto.Field(
+ proto.MESSAGE, number=10, oneof="value_type", message=timestamp.Timestamp,
+ )
+
+ string_value = proto.Field(proto.STRING, number=17, oneof="value_type")
+
+ bytes_value = proto.Field(proto.BYTES, number=18, oneof="value_type")
+
+ reference_value = proto.Field(proto.STRING, number=5, oneof="value_type")
+
+ geo_point_value = proto.Field(
+ proto.MESSAGE, number=8, oneof="value_type", message=latlng.LatLng,
+ )
+
+ array_value = proto.Field(
+ proto.MESSAGE, number=9, oneof="value_type", message="ArrayValue",
+ )
+
+ map_value = proto.Field(
+ proto.MESSAGE, number=6, oneof="value_type", message="MapValue",
+ )
+
+
+class ArrayValue(proto.Message):
+ r"""An array value.
+
+ Attributes:
+ values (Sequence[~.document.Value]):
+ Values in the array.
+ """
+
+ values = proto.RepeatedField(proto.MESSAGE, number=1, message=Value,)
+
+
+class MapValue(proto.Message):
+ r"""A map value.
+
+ Attributes:
+ fields (Sequence[~.document.MapValue.FieldsEntry]):
+ The map's fields.
+
+ The map keys represent field names. Field names matching the
+ regular expression ``__.*__`` are reserved. Reserved field
+ names are forbidden except in certain documented contexts.
+ The map keys, represented as UTF-8, must not exceed 1,500
+ bytes and cannot be empty.
+ """
+
+ fields = proto.MapField(proto.STRING, proto.MESSAGE, number=1, message=Value,)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_v1/types/firestore.py b/google/cloud/firestore_v1/types/firestore.py
new file mode 100644
index 0000000000..345d67f709
--- /dev/null
+++ b/google/cloud/firestore_v1/types/firestore.py
@@ -0,0 +1,1078 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.cloud.firestore_v1.types import common
+from google.cloud.firestore_v1.types import document as gf_document
+from google.cloud.firestore_v1.types import query as gf_query
+from google.cloud.firestore_v1.types import write
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+from google.rpc import status_pb2 as gr_status # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.firestore.v1",
+ manifest={
+ "GetDocumentRequest",
+ "ListDocumentsRequest",
+ "ListDocumentsResponse",
+ "CreateDocumentRequest",
+ "UpdateDocumentRequest",
+ "DeleteDocumentRequest",
+ "BatchGetDocumentsRequest",
+ "BatchGetDocumentsResponse",
+ "BeginTransactionRequest",
+ "BeginTransactionResponse",
+ "CommitRequest",
+ "CommitResponse",
+ "RollbackRequest",
+ "RunQueryRequest",
+ "RunQueryResponse",
+ "PartitionQueryRequest",
+ "PartitionQueryResponse",
+ "WriteRequest",
+ "WriteResponse",
+ "ListenRequest",
+ "ListenResponse",
+ "Target",
+ "TargetChange",
+ "ListCollectionIdsRequest",
+ "ListCollectionIdsResponse",
+ "BatchWriteRequest",
+ "BatchWriteResponse",
+ },
+)
+
+
+class GetDocumentRequest(proto.Message):
+ r"""The request for
+ [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument].
+
+ Attributes:
+ name (str):
+ Required. The resource name of the Document to get. In the
+ format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ mask (~.common.DocumentMask):
+ The fields to return. If not set, returns all
+ fields.
+ If the document has a field that is not present
+ in this mask, that field will not be returned in
+ the response.
+ transaction (bytes):
+ Reads the document in a transaction.
+ read_time (~.timestamp.Timestamp):
+ Reads the version of the document at the
+ given time. This may not be older than 270
+ seconds.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ mask = proto.Field(proto.MESSAGE, number=2, message=common.DocumentMask,)
+
+ transaction = proto.Field(proto.BYTES, number=3, oneof="consistency_selector")
+
+ read_time = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ oneof="consistency_selector",
+ message=timestamp.Timestamp,
+ )
+
+
+class ListDocumentsRequest(proto.Message):
+ r"""The request for
+ [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].
+
+ Attributes:
+ parent (str):
+ Required. The parent resource name. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents``
+ or
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ For example:
+ ``projects/my-project/databases/my-database/documents`` or
+ ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
+ collection_id (str):
+ Required. The collection ID, relative to ``parent``, to
+ list. For example: ``chatrooms`` or ``messages``.
+ page_size (int):
+ The maximum number of documents to return.
+ page_token (str):
+ The ``next_page_token`` value returned from a previous List
+ request, if any.
+ order_by (str):
+ The order to sort results by. For example:
+ ``priority desc, name``.
+ mask (~.common.DocumentMask):
+ The fields to return. If not set, returns all
+ fields.
+ If a document has a field that is not present in
+ this mask, that field will not be returned in
+ the response.
+ transaction (bytes):
+ Reads documents in a transaction.
+ read_time (~.timestamp.Timestamp):
+ Reads documents as they were at the given
+ time. This may not be older than 270 seconds.
+ show_missing (bool):
+ If the list should show missing documents. A missing
+ document is a document that does not exist but has
+ sub-documents. These documents will be returned with a key
+ but will not have fields,
+ [Document.create_time][google.firestore.v1.Document.create_time],
+ or
+ [Document.update_time][google.firestore.v1.Document.update_time]
+ set.
+
+ Requests with ``show_missing`` may not specify ``where`` or
+ ``order_by``.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ collection_id = proto.Field(proto.STRING, number=2)
+
+ page_size = proto.Field(proto.INT32, number=3)
+
+ page_token = proto.Field(proto.STRING, number=4)
+
+ order_by = proto.Field(proto.STRING, number=6)
+
+ mask = proto.Field(proto.MESSAGE, number=7, message=common.DocumentMask,)
+
+ transaction = proto.Field(proto.BYTES, number=8, oneof="consistency_selector")
+
+ read_time = proto.Field(
+ proto.MESSAGE,
+ number=10,
+ oneof="consistency_selector",
+ message=timestamp.Timestamp,
+ )
+
+ show_missing = proto.Field(proto.BOOL, number=12)
+
+
+class ListDocumentsResponse(proto.Message):
+ r"""The response for
+ [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].
+
+ Attributes:
+ documents (Sequence[~.gf_document.Document]):
+ The Documents found.
+ next_page_token (str):
+ The next page token.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ documents = proto.RepeatedField(
+ proto.MESSAGE, number=1, message=gf_document.Document,
+ )
+
+ next_page_token = proto.Field(proto.STRING, number=2)
+
+
+class CreateDocumentRequest(proto.Message):
+ r"""The request for
+ [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument].
+
+ Attributes:
+ parent (str):
+ Required. The parent resource. For example:
+ ``projects/{project_id}/databases/{database_id}/documents``
+ or
+ ``projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}``
+ collection_id (str):
+ Required. The collection ID, relative to ``parent``, to
+ list. For example: ``chatrooms``.
+ document_id (str):
+ The client-assigned document ID to use for
+ this document.
+ Optional. If not specified, an ID will be
+ assigned by the service.
+ document (~.gf_document.Document):
+ Required. The document to create. ``name`` must not be set.
+ mask (~.common.DocumentMask):
+ The fields to return. If not set, returns all
+ fields.
+ If the document has a field that is not present
+ in this mask, that field will not be returned in
+ the response.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ collection_id = proto.Field(proto.STRING, number=2)
+
+ document_id = proto.Field(proto.STRING, number=3)
+
+ document = proto.Field(proto.MESSAGE, number=4, message=gf_document.Document,)
+
+ mask = proto.Field(proto.MESSAGE, number=5, message=common.DocumentMask,)
+
+
+class UpdateDocumentRequest(proto.Message):
+ r"""The request for
+ [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument].
+
+ Attributes:
+ document (~.gf_document.Document):
+ Required. The updated document.
+ Creates the document if it does not already
+ exist.
+ update_mask (~.common.DocumentMask):
+ The fields to update.
+ None of the field paths in the mask may contain
+ a reserved name.
+ If the document exists on the server and has
+ fields not referenced in the mask, they are left
+ unchanged.
+ Fields referenced in the mask, but not present
+ in the input document, are deleted from the
+ document on the server.
+ mask (~.common.DocumentMask):
+ The fields to return. If not set, returns all
+ fields.
+ If the document has a field that is not present
+ in this mask, that field will not be returned in
+ the response.
+ current_document (~.common.Precondition):
+ An optional precondition on the document.
+ The request will fail if this is set and not met
+ by the target document.
+ """
+
+ document = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document,)
+
+ update_mask = proto.Field(proto.MESSAGE, number=2, message=common.DocumentMask,)
+
+ mask = proto.Field(proto.MESSAGE, number=3, message=common.DocumentMask,)
+
+ current_document = proto.Field(
+ proto.MESSAGE, number=4, message=common.Precondition,
+ )
+
+
+class DeleteDocumentRequest(proto.Message):
+ r"""The request for
+ [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument].
+
+ Attributes:
+ name (str):
+ Required. The resource name of the Document to delete. In
+ the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ current_document (~.common.Precondition):
+ An optional precondition on the document.
+ The request will fail if this is set and not met
+ by the target document.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ current_document = proto.Field(
+ proto.MESSAGE, number=2, message=common.Precondition,
+ )
+
+
+class BatchGetDocumentsRequest(proto.Message):
+ r"""The request for
+ [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].
+
+ Attributes:
+ database (str):
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ documents (Sequence[str]):
+ The names of the documents to retrieve. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+            The request will fail if any of the documents is not a child
+ resource of the given ``database``. Duplicate names will be
+ elided.
+ mask (~.common.DocumentMask):
+ The fields to return. If not set, returns all
+ fields.
+ If a document has a field that is not present in
+ this mask, that field will not be returned in
+ the response.
+ transaction (bytes):
+ Reads documents in a transaction.
+ new_transaction (~.common.TransactionOptions):
+ Starts a new transaction and reads the
+ documents. Defaults to a read-only transaction.
+ The new transaction ID will be returned as the
+ first response in the stream.
+ read_time (~.timestamp.Timestamp):
+ Reads documents as they were at the given
+ time. This may not be older than 270 seconds.
+ """
+
+ database = proto.Field(proto.STRING, number=1)
+
+ documents = proto.RepeatedField(proto.STRING, number=2)
+
+ mask = proto.Field(proto.MESSAGE, number=3, message=common.DocumentMask,)
+
+ transaction = proto.Field(proto.BYTES, number=4, oneof="consistency_selector")
+
+ new_transaction = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ oneof="consistency_selector",
+ message=common.TransactionOptions,
+ )
+
+ read_time = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ oneof="consistency_selector",
+ message=timestamp.Timestamp,
+ )
+
+
+class BatchGetDocumentsResponse(proto.Message):
+ r"""The streamed response for
+ [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].
+
+ Attributes:
+ found (~.gf_document.Document):
+ A document that was requested.
+ missing (str):
+ A document name that was requested but does not exist. In
+ the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ transaction (bytes):
+ The transaction that was started as part of this request.
+ Will only be set in the first response, and only if
+ [BatchGetDocumentsRequest.new_transaction][google.firestore.v1.BatchGetDocumentsRequest.new_transaction]
+ was set in the request.
+ read_time (~.timestamp.Timestamp):
+ The time at which the document was read. This may be
+            monotonically increasing, in this case the previous documents
+ in the result stream are guaranteed not to have changed
+ between their read_time and this one.
+ """
+
+ found = proto.Field(
+ proto.MESSAGE, number=1, oneof="result", message=gf_document.Document,
+ )
+
+ missing = proto.Field(proto.STRING, number=2, oneof="result")
+
+ transaction = proto.Field(proto.BYTES, number=3)
+
+ read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
+
+
+class BeginTransactionRequest(proto.Message):
+ r"""The request for
+ [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction].
+
+ Attributes:
+ database (str):
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ options (~.common.TransactionOptions):
+ The options for the transaction.
+ Defaults to a read-write transaction.
+ """
+
+ database = proto.Field(proto.STRING, number=1)
+
+ options = proto.Field(proto.MESSAGE, number=2, message=common.TransactionOptions,)
+
+
+class BeginTransactionResponse(proto.Message):
+ r"""The response for
+ [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction].
+
+ Attributes:
+ transaction (bytes):
+ The transaction that was started.
+ """
+
+ transaction = proto.Field(proto.BYTES, number=1)
+
+
+class CommitRequest(proto.Message):
+ r"""The request for
+ [Firestore.Commit][google.firestore.v1.Firestore.Commit].
+
+ Attributes:
+ database (str):
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ writes (Sequence[~.write.Write]):
+ The writes to apply.
+ Always executed atomically and in order.
+ transaction (bytes):
+ If set, applies all writes in this
+ transaction, and commits it.
+ """
+
+ database = proto.Field(proto.STRING, number=1)
+
+ writes = proto.RepeatedField(proto.MESSAGE, number=2, message=write.Write,)
+
+ transaction = proto.Field(proto.BYTES, number=3)
+
+
+class CommitResponse(proto.Message):
+ r"""The response for
+ [Firestore.Commit][google.firestore.v1.Firestore.Commit].
+
+ Attributes:
+ write_results (Sequence[~.write.WriteResult]):
+ The result of applying the writes.
+ This i-th write result corresponds to the i-th
+ write in the request.
+ commit_time (~.timestamp.Timestamp):
+ The time at which the commit occurred. Any read with an
+ equal or greater ``read_time`` is guaranteed to see the
+ effects of the commit.
+ """
+
+ write_results = proto.RepeatedField(
+ proto.MESSAGE, number=1, message=write.WriteResult,
+ )
+
+ commit_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
+
+
+class RollbackRequest(proto.Message):
+ r"""The request for
+ [Firestore.Rollback][google.firestore.v1.Firestore.Rollback].
+
+ Attributes:
+ database (str):
+ Required. The database name. In the format:
+ ``projects/{project_id}/databases/{database_id}``.
+ transaction (bytes):
+ Required. The transaction to roll back.
+ """
+
+ database = proto.Field(proto.STRING, number=1)
+
+ transaction = proto.Field(proto.BYTES, number=2)
+
+
+class RunQueryRequest(proto.Message):
+ r"""The request for
+ [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery].
+
+ Attributes:
+ parent (str):
+ Required. The parent resource name. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents``
+ or
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ For example:
+ ``projects/my-project/databases/my-database/documents`` or
+ ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
+ structured_query (~.gf_query.StructuredQuery):
+ A structured query.
+ transaction (bytes):
+ Reads documents in a transaction.
+ new_transaction (~.common.TransactionOptions):
+ Starts a new transaction and reads the
+ documents. Defaults to a read-only transaction.
+ The new transaction ID will be returned as the
+ first response in the stream.
+ read_time (~.timestamp.Timestamp):
+ Reads documents as they were at the given
+ time. This may not be older than 270 seconds.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ structured_query = proto.Field(
+ proto.MESSAGE, number=2, oneof="query_type", message=gf_query.StructuredQuery,
+ )
+
+ transaction = proto.Field(proto.BYTES, number=5, oneof="consistency_selector")
+
+ new_transaction = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="consistency_selector",
+ message=common.TransactionOptions,
+ )
+
+ read_time = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ oneof="consistency_selector",
+ message=timestamp.Timestamp,
+ )
+
+
class RunQueryResponse(proto.Message):
    r"""The response for
    [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery].

    Attributes:
        transaction (bytes):
            The transaction that was started as part of this request.
            Can only be set in the first response, and only if
            [RunQueryRequest.new_transaction][google.firestore.v1.RunQueryRequest.new_transaction]
            was set in the request. If set, no other fields will be set
            in this response.
        document (~.gf_document.Document):
            A query result.
            Not set when reporting partial progress.
        read_time (~.timestamp.Timestamp):
            The time at which the document was read. This may be
            monotonically increasing; in this case, the previous
            documents in the result stream are guaranteed not to have
            changed between their ``read_time`` and this one.

            If the query returns no results, a response with
            ``read_time`` and no ``document`` will be sent, and this
            represents the time at which the query was run.
        skipped_results (int):
            The number of results that have been skipped
            due to an offset between the last response and
            the current response.
    """

    # NOTE: declaration order differs from the proto field numbers
    # (``document`` is field 1); only the explicit ``number=`` matters on the wire.
    transaction = proto.Field(proto.BYTES, number=2)

    document = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document,)

    read_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)

    skipped_results = proto.Field(proto.INT32, number=4)
+
+
class PartitionQueryRequest(proto.Message):
    r"""The request for
    [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].

    Attributes:
        parent (str):
            Required. The parent resource name. In the format:
            ``projects/{project_id}/databases/{database_id}/documents``.
            Document resource names are not supported; only database
            resource names can be specified.
        structured_query (~.gf_query.StructuredQuery):
            A structured query.
            Query must specify collection with all
            descendants and be ordered by name ascending.
            Other filters, order bys, limits, offsets, and
            start/end cursors are not supported.
        partition_count (int):
            The desired maximum number of partition
            points. The partitions may be returned across
            multiple pages of results. The number must be
            positive. The actual number of partitions
            returned may be fewer.

            For example, this may be set to one fewer than
            the number of parallel queries to be run, or in
            running a data pipeline job, one fewer than the
            number of workers or compute instances
            available.
        page_token (str):
            The ``next_page_token`` value returned from a previous call
            to PartitionQuery that may be used to get an additional set
            of results. There are no ordering guarantees between sets of
            results. Thus, using multiple sets of results will require
            merging the different result sets.

            For example, two subsequent calls using a page_token may
            return:

            -  cursor B, cursor M, cursor Q
            -  cursor A, cursor U, cursor W

            To obtain a complete result set ordered with respect to the
            results of the query supplied to PartitionQuery, the results
            sets should be merged: cursor A, cursor B, cursor M, cursor
            Q, cursor U, cursor W
        page_size (int):
            The maximum number of partitions to return in this call,
            subject to ``partition_count``.

            For example, if ``partition_count`` = 10 and ``page_size`` =
            8, the first call to PartitionQuery will return up to 8
            partitions and a ``next_page_token`` if more results exist.
            A second call to PartitionQuery will return up to 2
            partitions, to complete the total of 10 specified in
            ``partition_count``.
    """

    parent = proto.Field(proto.STRING, number=1)

    # ``query_type`` oneof currently has a single member in this schema.
    structured_query = proto.Field(
        proto.MESSAGE, number=2, oneof="query_type", message=gf_query.StructuredQuery,
    )

    partition_count = proto.Field(proto.INT64, number=3)

    page_token = proto.Field(proto.STRING, number=4)

    page_size = proto.Field(proto.INT32, number=5)
+
+
class PartitionQueryResponse(proto.Message):
    r"""The response for
    [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].

    Attributes:
        partitions (Sequence[~.gf_query.Cursor]):
            Partition results. Each partition is a split point that can
            be used by RunQuery as a starting or end point for the query
            results. The RunQuery requests must be made with the same
            query supplied to this PartitionQuery request. The partition
            cursors will be ordered according to same ordering as the
            results of the query supplied to PartitionQuery.

            For example, if a PartitionQuery request returns partition
            cursors A and B, running the following three queries will
            return the entire result set of the original query:

            -  query, end_at A
            -  query, start_at A, end_at B
            -  query, start_at B

            An empty result may indicate that the query has too few
            results to be partitioned.
        next_page_token (str):
            A page token that may be used to request an additional set
            of results, up to the number specified by
            ``partition_count`` in the PartitionQuery request. If blank,
            there are no more results.
    """

    @property
    def raw_page(self):
        # Exposes this message as its own "raw page"; NOTE(review): this
        # appears to be consumed by the generated client's pager helpers —
        # confirm against the service client before relying on it elsewhere.
        return self

    partitions = proto.RepeatedField(proto.MESSAGE, number=1, message=gf_query.Cursor,)

    next_page_token = proto.Field(proto.STRING, number=2)
+
+
class WriteRequest(proto.Message):
    r"""The request for
    [Firestore.Write][google.firestore.v1.Firestore.Write].

    The first request creates a stream, or resumes an existing one from
    a token.

    When creating a new stream, the server replies with a response
    containing only an ID and a token, to use in the next request.

    When resuming a stream, the server first streams any responses later
    than the given token, then a response containing only an up-to-date
    token, to use in the next request.

    Attributes:
        database (str):
            Required. The database name. In the format:
            ``projects/{project_id}/databases/{database_id}``. This is
            only required in the first message.
        stream_id (str):
            The ID of the write stream to resume.
            This may only be set in the first message. When
            left empty, a new write stream will be created.
        writes (Sequence[~.write.Write]):
            The writes to apply.
            Always executed atomically and in order.
            This must be empty on the first request.
            This may be empty on the last request.
            This must not be empty on all other requests.
        stream_token (bytes):
            A stream token that was previously sent by the server.

            The client should set this field to the token from the most
            recent [WriteResponse][google.firestore.v1.WriteResponse] it
            has received. This acknowledges that the client has received
            responses up to this token. After sending this token,
            earlier tokens may not be used anymore.

            The server may close the stream if there are too many
            unacknowledged responses.

            Leave this field unset when creating a new stream. To resume
            a stream at a specific point, set this field and the
            ``stream_id`` field.
        labels (Sequence[~.firestore.WriteRequest.LabelsEntry]):
            Labels associated with this write request.
    """

    database = proto.Field(proto.STRING, number=1)

    stream_id = proto.Field(proto.STRING, number=2)

    writes = proto.RepeatedField(proto.MESSAGE, number=3, message=write.Write,)

    stream_token = proto.Field(proto.BYTES, number=4)

    # Proto map<string, string>; surfaced to callers as a dict-like mapping.
    labels = proto.MapField(proto.STRING, proto.STRING, number=5)
+
+
class WriteResponse(proto.Message):
    r"""The response for
    [Firestore.Write][google.firestore.v1.Firestore.Write].

    Attributes:
        stream_id (str):
            The ID of the stream.
            Only set on the first message, when a new stream
            was created.
        stream_token (bytes):
            A token that represents the position of this
            response in the stream. This can be used by a
            client to resume the stream at this point.
            This field is always set.
        write_results (Sequence[~.write.WriteResult]):
            The result of applying the writes.
            The i-th write result corresponds to the i-th
            write in the request.
        commit_time (~.timestamp.Timestamp):
            The time at which the commit occurred. Any read with an
            equal or greater ``read_time`` is guaranteed to see the
            effects of the write.
    """

    stream_id = proto.Field(proto.STRING, number=1)

    stream_token = proto.Field(proto.BYTES, number=2)

    write_results = proto.RepeatedField(
        proto.MESSAGE, number=3, message=write.WriteResult,
    )

    commit_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
+
+
class ListenRequest(proto.Message):
    r"""A request for
    [Firestore.Listen][google.firestore.v1.Firestore.Listen]

    Attributes:
        database (str):
            Required. The database name. In the format:
            ``projects/{project_id}/databases/{database_id}``.
        add_target (~.firestore.Target):
            A target to add to this stream.
        remove_target (int):
            The ID of a target to remove from this
            stream.
        labels (Sequence[~.firestore.ListenRequest.LabelsEntry]):
            Labels associated with this target change.
    """

    database = proto.Field(proto.STRING, number=1)

    # ``add_target`` and ``remove_target`` are mutually exclusive members of
    # the ``target_change`` oneof. "Target" is a forward reference resolved by
    # proto-plus; the class is defined later in this module.
    add_target = proto.Field(
        proto.MESSAGE, number=2, oneof="target_change", message="Target",
    )

    remove_target = proto.Field(proto.INT32, number=3, oneof="target_change")

    labels = proto.MapField(proto.STRING, proto.STRING, number=4)
+
+
class ListenResponse(proto.Message):
    r"""The response for
    [Firestore.Listen][google.firestore.v1.Firestore.Listen].

    Attributes:
        target_change (~.firestore.TargetChange):
            Targets have changed.
        document_change (~.write.DocumentChange):
            A [Document][google.firestore.v1.Document] has changed.
        document_delete (~.write.DocumentDelete):
            A [Document][google.firestore.v1.Document] has been deleted.
        document_remove (~.write.DocumentRemove):
            A [Document][google.firestore.v1.Document] has been removed
            from a target (because it is no longer relevant to that
            target).
        filter (~.write.ExistenceFilter):
            A filter to apply to the set of documents
            previously returned for the given target.

            Returned when documents may have been removed
            from the given target, but the exact documents
            are unknown.
    """

    # All five fields below form the ``response_type`` oneof: exactly one is
    # set per response. Declaration order follows readability, not field
    # numbers (``document_remove`` is field 6, ``filter`` is field 5).
    target_change = proto.Field(
        proto.MESSAGE, number=2, oneof="response_type", message="TargetChange",
    )

    document_change = proto.Field(
        proto.MESSAGE, number=3, oneof="response_type", message=write.DocumentChange,
    )

    document_delete = proto.Field(
        proto.MESSAGE, number=4, oneof="response_type", message=write.DocumentDelete,
    )

    document_remove = proto.Field(
        proto.MESSAGE, number=6, oneof="response_type", message=write.DocumentRemove,
    )

    # Name fixed by the proto schema; shadows the ``filter`` builtin only
    # within this class namespace.
    filter = proto.Field(
        proto.MESSAGE, number=5, oneof="response_type", message=write.ExistenceFilter,
    )
+
+
class Target(proto.Message):
    r"""A specification of a set of documents to listen to.

    Attributes:
        query (~.firestore.Target.QueryTarget):
            A target specified by a query.
        documents (~.firestore.Target.DocumentsTarget):
            A target specified by a set of document
            names.
        resume_token (bytes):
            A resume token from a prior
            [TargetChange][google.firestore.v1.TargetChange] for an
            identical target.

            Using a resume token with a different target is unsupported
            and may fail.
        read_time (~.timestamp.Timestamp):
            Start listening after a specific ``read_time``.

            The client must know the state of matching documents at this
            time.
        target_id (int):
            The target ID that identifies the target on
            the stream. Must be a positive number and non-
            zero.
        once (bool):
            If the target should be removed once it is
            current and consistent.
    """

    class DocumentsTarget(proto.Message):
        r"""A target specified by a set of document names.

        Attributes:
            documents (Sequence[str]):
                The names of the documents to retrieve. In the format:
                ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
                The request will fail if any of the document is not a child
                resource of the given ``database``. Duplicate names will be
                elided.
        """

        documents = proto.RepeatedField(proto.STRING, number=2)

    class QueryTarget(proto.Message):
        r"""A target specified by a query.

        Attributes:
            parent (str):
                The parent resource name. In the format:
                ``projects/{project_id}/databases/{database_id}/documents``
                or
                ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
                For example:
                ``projects/my-project/databases/my-database/documents`` or
                ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
            structured_query (~.gf_query.StructuredQuery):
                A structured query.
        """

        parent = proto.Field(proto.STRING, number=1)

        structured_query = proto.Field(
            proto.MESSAGE,
            number=2,
            oneof="query_type",
            message=gf_query.StructuredQuery,
        )

    # ``query``/``documents`` form the ``target_type`` oneof;
    # ``resume_token``/``read_time`` form the ``resume_type`` oneof.
    query = proto.Field(
        proto.MESSAGE, number=2, oneof="target_type", message=QueryTarget,
    )

    documents = proto.Field(
        proto.MESSAGE, number=3, oneof="target_type", message=DocumentsTarget,
    )

    resume_token = proto.Field(proto.BYTES, number=4, oneof="resume_type")

    read_time = proto.Field(
        proto.MESSAGE, number=11, oneof="resume_type", message=timestamp.Timestamp,
    )

    target_id = proto.Field(proto.INT32, number=5)

    once = proto.Field(proto.BOOL, number=6)
+
+
class TargetChange(proto.Message):
    r"""Targets being watched have changed.

    Attributes:
        target_change_type (~.firestore.TargetChange.TargetChangeType):
            The type of change that occurred.
        target_ids (Sequence[int]):
            The target IDs of targets that have changed.
            If empty, the change applies to all targets.

            The order of the target IDs is not defined.
        cause (~.gr_status.Status):
            The error that resulted in this change, if
            applicable.
        resume_token (bytes):
            A token that can be used to resume the stream for the given
            ``target_ids``, or all targets if ``target_ids`` is empty.

            Not set on every target change.
        read_time (~.timestamp.Timestamp):
            The consistent ``read_time`` for the given ``target_ids``
            (omitted when the target_ids are not at a consistent
            snapshot).

            The stream is guaranteed to send a ``read_time`` with
            ``target_ids`` empty whenever the entire stream reaches a
            new consistent snapshot. ADD, CURRENT, and RESET messages
            are guaranteed to (eventually) result in a new consistent
            snapshot (while NO_CHANGE and REMOVE messages are not).

            For a given stream, ``read_time`` is guaranteed to be
            monotonically increasing.
    """

    class TargetChangeType(proto.Enum):
        r"""The type of change."""
        NO_CHANGE = 0
        ADD = 1
        REMOVE = 2
        CURRENT = 3
        RESET = 4

    target_change_type = proto.Field(proto.ENUM, number=1, enum=TargetChangeType,)

    target_ids = proto.RepeatedField(proto.INT32, number=2)

    cause = proto.Field(proto.MESSAGE, number=3, message=gr_status.Status,)

    resume_token = proto.Field(proto.BYTES, number=4)

    # Field 5 is unused in this schema; ``read_time`` is proto field 6.
    read_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,)
+
+
class ListCollectionIdsRequest(proto.Message):
    r"""The request for
    [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].

    Attributes:
        parent (str):
            Required. The parent document. In the format:
            ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
            For example:
            ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
        page_size (int):
            The maximum number of results to return.
        page_token (str):
            A page token. Must be a value from
            [ListCollectionIdsResponse][google.firestore.v1.ListCollectionIdsResponse].
    """

    parent = proto.Field(proto.STRING, number=1)

    page_size = proto.Field(proto.INT32, number=2)

    page_token = proto.Field(proto.STRING, number=3)
+
+
class ListCollectionIdsResponse(proto.Message):
    r"""The response from
    [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].

    Attributes:
        collection_ids (Sequence[str]):
            The collection ids.
        next_page_token (str):
            A page token that may be used to continue the
            list.
    """

    @property
    def raw_page(self):
        # Exposes this message as its own "raw page"; NOTE(review): this
        # appears to be consumed by the generated client's pager helpers —
        # confirm against the service client before relying on it elsewhere.
        return self

    collection_ids = proto.RepeatedField(proto.STRING, number=1)

    next_page_token = proto.Field(proto.STRING, number=2)
+
+
class BatchWriteRequest(proto.Message):
    r"""The request for
    [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].

    Attributes:
        database (str):
            Required. The database name. In the format:
            ``projects/{project_id}/databases/{database_id}``.
        writes (Sequence[~.write.Write]):
            The writes to apply.
            Method does not apply writes atomically and does
            not guarantee ordering. Each write succeeds or
            fails independently. You cannot write to the
            same document more than once per request.
        labels (Sequence[~.firestore.BatchWriteRequest.LabelsEntry]):
            Labels associated with this batch write.
    """

    database = proto.Field(proto.STRING, number=1)

    writes = proto.RepeatedField(proto.MESSAGE, number=2, message=write.Write,)

    # Proto map<string, string>; surfaced to callers as a dict-like mapping.
    labels = proto.MapField(proto.STRING, proto.STRING, number=3)
+
+
class BatchWriteResponse(proto.Message):
    r"""The response from
    [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].

    Attributes:
        write_results (Sequence[~.write.WriteResult]):
            The result of applying the writes.
            The i-th write result corresponds to the i-th
            write in the request.
        status (Sequence[~.gr_status.Status]):
            The status of applying the writes.
            The i-th write status corresponds to the i-th
            write in the request.
    """

    write_results = proto.RepeatedField(
        proto.MESSAGE, number=1, message=write.WriteResult,
    )

    status = proto.RepeatedField(proto.MESSAGE, number=2, message=gr_status.Status,)
+
+
# Public API of this module is exactly the message names declared in the
# ``__protobuf__`` manifest above.
__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_v1/types/query.py b/google/cloud/firestore_v1/types/query.py
new file mode 100644
index 0000000000..8a65a3623a
--- /dev/null
+++ b/google/cloud/firestore_v1/types/query.py
@@ -0,0 +1,302 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.cloud.firestore_v1.types import document
+from google.protobuf import wrappers_pb2 as wrappers # type: ignore
+
+
# Registers this module's messages under the ``google.firestore.v1`` proto
# package; ``manifest`` doubles as the module's public-name list (see
# ``__all__`` at the bottom of the file).
__protobuf__ = proto.module(
    package="google.firestore.v1", manifest={"StructuredQuery", "Cursor",},
)
+
+
class StructuredQuery(proto.Message):
    r"""A Firestore query.

    Attributes:
        select (~.query.StructuredQuery.Projection):
            The projection to return.
        from_ (Sequence[~.query.StructuredQuery.CollectionSelector]):
            The collections to query.
        where (~.query.StructuredQuery.Filter):
            The filter to apply.
        order_by (Sequence[~.query.StructuredQuery.Order]):
            The order to apply to the query results.

            Firestore guarantees a stable ordering through the following
            rules:

            -  Any field required to appear in ``order_by``, that is not
               already specified in ``order_by``, is appended to the
               order in field name order by default.
            -  If an order on ``__name__`` is not specified, it is
               appended by default.

            Fields are appended with the same sort direction as the last
            order specified, or 'ASCENDING' if no order was specified.
            For example:

            -  ``SELECT * FROM Foo ORDER BY A`` becomes
               ``SELECT * FROM Foo ORDER BY A, __name__``
            -  ``SELECT * FROM Foo ORDER BY A DESC`` becomes
               ``SELECT * FROM Foo ORDER BY A DESC, __name__ DESC``
            -  ``SELECT * FROM Foo WHERE A > 1`` becomes
               ``SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__``
        start_at (~.query.Cursor):
            A starting point for the query results.
        end_at (~.query.Cursor):
            An end point for the query results.
        offset (int):
            The number of results to skip.
            Applies before limit, but after all other
            constraints. Must be >= 0 if specified.
        limit (~.wrappers.Int32Value):
            The maximum number of results to return.
            Applies after all other constraints.
            Must be >= 0 if specified.
    """

    class Direction(proto.Enum):
        r"""A sort direction."""
        DIRECTION_UNSPECIFIED = 0
        ASCENDING = 1
        DESCENDING = 2

    class CollectionSelector(proto.Message):
        r"""A selection of a collection, such as ``messages as m1``.

        Attributes:
            collection_id (str):
                The collection ID.
                When set, selects only collections with this ID.
            all_descendants (bool):
                When false, selects only collections that are immediate
                children of the ``parent`` specified in the containing
                ``RunQueryRequest``. When true, selects all descendant
                collections.
        """

        collection_id = proto.Field(proto.STRING, number=2)

        all_descendants = proto.Field(proto.BOOL, number=3)

    class Filter(proto.Message):
        r"""A filter.

        Attributes:
            composite_filter (~.query.StructuredQuery.CompositeFilter):
                A composite filter.
            field_filter (~.query.StructuredQuery.FieldFilter):
                A filter on a document field.
            unary_filter (~.query.StructuredQuery.UnaryFilter):
                A filter that takes exactly one argument.
        """

        # The three members below form the ``filter_type`` oneof. Sibling
        # nested classes are referenced by their dotted string names because
        # they are not yet bound when this class body executes.
        composite_filter = proto.Field(
            proto.MESSAGE,
            number=1,
            oneof="filter_type",
            message="StructuredQuery.CompositeFilter",
        )

        field_filter = proto.Field(
            proto.MESSAGE,
            number=2,
            oneof="filter_type",
            message="StructuredQuery.FieldFilter",
        )

        unary_filter = proto.Field(
            proto.MESSAGE,
            number=3,
            oneof="filter_type",
            message="StructuredQuery.UnaryFilter",
        )

    class CompositeFilter(proto.Message):
        r"""A filter that merges multiple other filters using the given
        operator.

        Attributes:
            op (~.query.StructuredQuery.CompositeFilter.Operator):
                The operator for combining multiple filters.
            filters (Sequence[~.query.StructuredQuery.Filter]):
                The list of filters to combine.
                Must contain at least one filter.
        """

        class Operator(proto.Enum):
            r"""A composite filter operator."""
            OPERATOR_UNSPECIFIED = 0
            AND = 1

        op = proto.Field(
            proto.ENUM, number=1, enum="StructuredQuery.CompositeFilter.Operator",
        )

        filters = proto.RepeatedField(
            proto.MESSAGE, number=2, message="StructuredQuery.Filter",
        )

    class FieldFilter(proto.Message):
        r"""A filter on a specific field.

        Attributes:
            field (~.query.StructuredQuery.FieldReference):
                The field to filter by.
            op (~.query.StructuredQuery.FieldFilter.Operator):
                The operator to filter by.
            value (~.document.Value):
                The value to compare to.
        """

        class Operator(proto.Enum):
            r"""A field filter operator."""
            OPERATOR_UNSPECIFIED = 0
            LESS_THAN = 1
            LESS_THAN_OR_EQUAL = 2
            GREATER_THAN = 3
            GREATER_THAN_OR_EQUAL = 4
            EQUAL = 5
            NOT_EQUAL = 6
            ARRAY_CONTAINS = 7
            IN = 8
            ARRAY_CONTAINS_ANY = 9
            NOT_IN = 10

        field = proto.Field(
            proto.MESSAGE, number=1, message="StructuredQuery.FieldReference",
        )

        op = proto.Field(
            proto.ENUM, number=2, enum="StructuredQuery.FieldFilter.Operator",
        )

        value = proto.Field(proto.MESSAGE, number=3, message=document.Value,)

    class UnaryFilter(proto.Message):
        r"""A filter with a single operand.

        Attributes:
            op (~.query.StructuredQuery.UnaryFilter.Operator):
                The unary operator to apply.
            field (~.query.StructuredQuery.FieldReference):
                The field to which to apply the operator.
        """

        class Operator(proto.Enum):
            r"""A unary operator."""
            OPERATOR_UNSPECIFIED = 0
            IS_NAN = 2
            IS_NULL = 3
            IS_NOT_NAN = 4
            IS_NOT_NULL = 5

        op = proto.Field(
            proto.ENUM, number=1, enum="StructuredQuery.UnaryFilter.Operator",
        )

        field = proto.Field(
            proto.MESSAGE,
            number=2,
            oneof="operand_type",
            message="StructuredQuery.FieldReference",
        )

    class Order(proto.Message):
        r"""An order on a field.

        Attributes:
            field (~.query.StructuredQuery.FieldReference):
                The field to order by.
            direction (~.query.StructuredQuery.Direction):
                The direction to order by. Defaults to ``ASCENDING``.
        """

        field = proto.Field(
            proto.MESSAGE, number=1, message="StructuredQuery.FieldReference",
        )

        direction = proto.Field(proto.ENUM, number=2, enum="StructuredQuery.Direction",)

    class FieldReference(proto.Message):
        r"""A reference to a field, such as ``max(messages.time) as max_time``.

        Attributes:
            field_path (str):
                A dot-delimited path to the referenced field.
        """

        field_path = proto.Field(proto.STRING, number=2)

    class Projection(proto.Message):
        r"""The projection of document's fields to return.

        Attributes:
            fields (Sequence[~.query.StructuredQuery.FieldReference]):
                The fields to return.

                If empty, all fields are returned. To only return the name
                of the document, use ``['__name__']``.
        """

        fields = proto.RepeatedField(
            proto.MESSAGE, number=2, message="StructuredQuery.FieldReference",
        )

    select = proto.Field(proto.MESSAGE, number=1, message=Projection,)

    # Trailing underscore avoids the ``from`` keyword; proto-plus maps this
    # attribute to the proto field named ``from``.
    from_ = proto.RepeatedField(proto.MESSAGE, number=2, message=CollectionSelector,)

    where = proto.Field(proto.MESSAGE, number=3, message=Filter,)

    order_by = proto.RepeatedField(proto.MESSAGE, number=4, message=Order,)

    start_at = proto.Field(proto.MESSAGE, number=7, message="Cursor",)

    end_at = proto.Field(proto.MESSAGE, number=8, message="Cursor",)

    offset = proto.Field(proto.INT32, number=6)

    # Int32Value wrapper (rather than a bare int32) distinguishes "unset"
    # from an explicit limit of 0.
    limit = proto.Field(proto.MESSAGE, number=5, message=wrappers.Int32Value,)
+
+
class Cursor(proto.Message):
    r"""A position in a query result set.

    Attributes:
        values (Sequence[~.document.Value]):
            The values that represent a position, in the
            order they appear in the order by clause of a
            query.
            Can contain fewer values than specified in the
            order by clause.
        before (bool):
            If the position is just before or just after
            the given values, relative to the sort order
            defined by the query.
    """

    values = proto.RepeatedField(proto.MESSAGE, number=1, message=document.Value,)

    before = proto.Field(proto.BOOL, number=2)
+
+
# Public API of this module is exactly the message names declared in the
# ``__protobuf__`` manifest above.
__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_v1/types/write.py b/google/cloud/firestore_v1/types/write.py
new file mode 100644
index 0000000000..6b3f49b530
--- /dev/null
+++ b/google/cloud/firestore_v1/types/write.py
@@ -0,0 +1,381 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.cloud.firestore_v1.types import common
+from google.cloud.firestore_v1.types import document as gf_document
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.firestore.v1",
+ manifest={
+ "Write",
+ "DocumentTransform",
+ "WriteResult",
+ "DocumentChange",
+ "DocumentDelete",
+ "DocumentRemove",
+ "ExistenceFilter",
+ },
+)
+
+
+class Write(proto.Message):
+ r"""A write on a document.
+
+ Attributes:
+ update (~.gf_document.Document):
+ A document to write.
+ delete (str):
+ A document name to delete. In the format:
+ ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+ transform (~.write.DocumentTransform):
+ Applies a transformation to a document.
+ update_mask (~.common.DocumentMask):
+ The fields to update in this write.
+
+ This field can be set only when the operation is ``update``.
+ If the mask is not set for an ``update`` and the document
+ exists, any existing data will be overwritten. If the mask
+ is set and the document on the server has fields not covered
+ by the mask, they are left unchanged. Fields referenced in
+ the mask, but not present in the input document, are deleted
+ from the document on the server. The field paths in this
+ mask must not contain a reserved field name.
+ update_transforms (Sequence[~.write.DocumentTransform.FieldTransform]):
+ The transforms to perform after update.
+
+ This field can be set only when the operation is ``update``.
+ If present, this write is equivalent to performing
+ ``update`` and ``transform`` to the same document atomically
+ and in order.
+ current_document (~.common.Precondition):
+ An optional precondition on the document.
+ The write will fail if this is set and not met
+ by the target document.
+ """
+
+ update = proto.Field(
+ proto.MESSAGE, number=1, oneof="operation", message=gf_document.Document,
+ )
+
+ delete = proto.Field(proto.STRING, number=2, oneof="operation")
+
+ transform = proto.Field(
+ proto.MESSAGE, number=6, oneof="operation", message="DocumentTransform",
+ )
+
+ update_mask = proto.Field(proto.MESSAGE, number=3, message=common.DocumentMask,)
+
+ update_transforms = proto.RepeatedField(
+ proto.MESSAGE, number=7, message="DocumentTransform.FieldTransform",
+ )
+
+ current_document = proto.Field(
+ proto.MESSAGE, number=4, message=common.Precondition,
+ )
+
+
+class DocumentTransform(proto.Message):
+ r"""A transformation of a document.
+
+ Attributes:
+ document (str):
+ The name of the document to transform.
+ field_transforms (Sequence[~.write.DocumentTransform.FieldTransform]):
+ The list of transformations to apply to the
+ fields of the document, in order.
+ This must not be empty.
+ """
+
+ class FieldTransform(proto.Message):
+ r"""A transformation of a field of the document.
+
+ Attributes:
+ field_path (str):
+ The path of the field. See
+ [Document.fields][google.firestore.v1.Document.fields] for
+ the field path syntax reference.
+ set_to_server_value (~.write.DocumentTransform.FieldTransform.ServerValue):
+ Sets the field to the given server value.
+ increment (~.gf_document.Value):
+ Adds the given value to the field's current
+ value.
+ This must be an integer or a double value.
+ If the field is not an integer or double, or if
+ the field does not yet exist, the transformation
+ will set the field to the given value. If either
+ of the given value or the current field value
+ are doubles, both values will be interpreted as
+ doubles. Double arithmetic and representation of
+ double values follow IEEE 754 semantics. If
+ there is positive/negative integer overflow, the
+ field is resolved to the largest magnitude
+ positive/negative integer.
+ maximum (~.gf_document.Value):
+ Sets the field to the maximum of its current
+ value and the given value.
+ This must be an integer or a double value.
+ If the field is not an integer or double, or if
+ the field does not yet exist, the transformation
+ will set the field to the given value. If a
+ maximum operation is applied where the field and
+ the input value are of mixed types (that is -
+ one is an integer and one is a double) the field
+ takes on the type of the larger operand. If the
+ operands are equivalent (e.g. 3 and 3.0), the
+ field does not change. 0, 0.0, and -0.0 are all
+ zero. The maximum of a zero stored value and
+ zero input value is always the stored value.
+ The maximum of any numeric value x and NaN is
+ NaN.
+ minimum (~.gf_document.Value):
+ Sets the field to the minimum of its current
+ value and the given value.
+ This must be an integer or a double value.
+ If the field is not an integer or double, or if
+ the field does not yet exist, the transformation
+ will set the field to the input value. If a
+ minimum operation is applied where the field and
+ the input value are of mixed types (that is -
+ one is an integer and one is a double) the field
+ takes on the type of the smaller operand. If the
+ operands are equivalent (e.g. 3 and 3.0), the
+ field does not change. 0, 0.0, and -0.0 are all
+ zero. The minimum of a zero stored value and
+ zero input value is always the stored value.
+ The minimum of any numeric value x and NaN is
+ NaN.
+ append_missing_elements (~.gf_document.ArrayValue):
+ Append the given elements in order if they are not already
+ present in the current field value. If the field is not an
+ array, or if the field does not yet exist, it is first set
+ to the empty array.
+
+ Equivalent numbers of different types (e.g. 3L and 3.0) are
+ considered equal when checking if a value is missing. NaN is
+ equal to NaN, and Null is equal to Null. If the input
+ contains multiple equivalent values, only the first will be
+ considered.
+
+ The corresponding transform_result will be the null value.
+ remove_all_from_array (~.gf_document.ArrayValue):
+ Remove all of the given elements from the array in the
+ field. If the field is not an array, or if the field does
+ not yet exist, it is set to the empty array.
+
+ Equivalent numbers of the different types (e.g. 3L and 3.0)
+ are considered equal when deciding whether an element should
+ be removed. NaN is equal to NaN, and Null is equal to Null.
+ This will remove all equivalent values if there are
+ duplicates.
+
+ The corresponding transform_result will be the null value.
+ """
+
+ class ServerValue(proto.Enum):
+ r"""A value that is calculated by the server."""
+ SERVER_VALUE_UNSPECIFIED = 0
+ REQUEST_TIME = 1
+
+ field_path = proto.Field(proto.STRING, number=1)
+
+ set_to_server_value = proto.Field(
+ proto.ENUM,
+ number=2,
+ oneof="transform_type",
+ enum="DocumentTransform.FieldTransform.ServerValue",
+ )
+
+ increment = proto.Field(
+ proto.MESSAGE, number=3, oneof="transform_type", message=gf_document.Value,
+ )
+
+ maximum = proto.Field(
+ proto.MESSAGE, number=4, oneof="transform_type", message=gf_document.Value,
+ )
+
+ minimum = proto.Field(
+ proto.MESSAGE, number=5, oneof="transform_type", message=gf_document.Value,
+ )
+
+ append_missing_elements = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="transform_type",
+ message=gf_document.ArrayValue,
+ )
+
+ remove_all_from_array = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ oneof="transform_type",
+ message=gf_document.ArrayValue,
+ )
+
+ document = proto.Field(proto.STRING, number=1)
+
+ field_transforms = proto.RepeatedField(
+ proto.MESSAGE, number=2, message=FieldTransform,
+ )
+
+
+class WriteResult(proto.Message):
+ r"""The result of applying a write.
+
+ Attributes:
+ update_time (~.timestamp.Timestamp):
+ The last update time of the document after applying the
+ write. Not set after a ``delete``.
+
+ If the write did not actually change the document, this will
+ be the previous update_time.
+ transform_results (Sequence[~.gf_document.Value]):
+ The results of applying each
+ [DocumentTransform.FieldTransform][google.firestore.v1.DocumentTransform.FieldTransform],
+ in the same order.
+ """
+
+ update_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
+
+ transform_results = proto.RepeatedField(
+ proto.MESSAGE, number=2, message=gf_document.Value,
+ )
+
+
+class DocumentChange(proto.Message):
+ r"""A [Document][google.firestore.v1.Document] has changed.
+
+ May be the result of multiple [writes][google.firestore.v1.Write],
+ including deletes, that ultimately resulted in a new value for the
+ [Document][google.firestore.v1.Document].
+
+ Multiple [DocumentChange][google.firestore.v1.DocumentChange]
+ messages may be returned for the same logical change, if multiple
+ targets are affected.
+
+ Attributes:
+ document (~.gf_document.Document):
+ The new state of the
+ [Document][google.firestore.v1.Document].
+
+ If ``mask`` is set, contains only fields that were updated
+ or added.
+ target_ids (Sequence[int]):
+ A set of target IDs of targets that match
+ this document.
+ removed_target_ids (Sequence[int]):
+ A set of target IDs for targets that no
+ longer match this document.
+ """
+
+ document = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document,)
+
+ target_ids = proto.RepeatedField(proto.INT32, number=5)
+
+ removed_target_ids = proto.RepeatedField(proto.INT32, number=6)
+
+
+class DocumentDelete(proto.Message):
+ r"""A [Document][google.firestore.v1.Document] has been deleted.
+
+ May be the result of multiple [writes][google.firestore.v1.Write],
+ including updates, the last of which deleted the
+ [Document][google.firestore.v1.Document].
+
+ Multiple [DocumentDelete][google.firestore.v1.DocumentDelete]
+ messages may be returned for the same logical delete, if multiple
+ targets are affected.
+
+ Attributes:
+ document (str):
+ The resource name of the
+ [Document][google.firestore.v1.Document] that was deleted.
+ removed_target_ids (Sequence[int]):
+ A set of target IDs for targets that
+ previously matched this entity.
+ read_time (~.timestamp.Timestamp):
+ The read timestamp at which the delete was observed.
+
+ Greater or equal to the ``commit_time`` of the delete.
+ """
+
+ document = proto.Field(proto.STRING, number=1)
+
+ removed_target_ids = proto.RepeatedField(proto.INT32, number=6)
+
+ read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
+
+
+class DocumentRemove(proto.Message):
+ r"""A [Document][google.firestore.v1.Document] has been removed from the
+ view of the targets.
+
+ Sent if the document is no longer relevant to a target and is out of
+ view. Can be sent instead of a DocumentDelete or a DocumentChange if
+ the server can not send the new value of the document.
+
+ Multiple [DocumentRemove][google.firestore.v1.DocumentRemove]
+ messages may be returned for the same logical write or delete, if
+ multiple targets are affected.
+
+ Attributes:
+ document (str):
+ The resource name of the
+ [Document][google.firestore.v1.Document] that has gone out
+ of view.
+ removed_target_ids (Sequence[int]):
+ A set of target IDs for targets that
+ previously matched this document.
+ read_time (~.timestamp.Timestamp):
+ The read timestamp at which the remove was observed.
+
+ Greater or equal to the ``commit_time`` of the
+ change/delete/remove.
+ """
+
+ document = proto.Field(proto.STRING, number=1)
+
+ removed_target_ids = proto.RepeatedField(proto.INT32, number=2)
+
+ read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
+
+
+class ExistenceFilter(proto.Message):
+ r"""A digest of all the documents that match a given target.
+
+ Attributes:
+ target_id (int):
+ The target ID to which this filter applies.
+ count (int):
+ The total count of documents that match
+ [target_id][google.firestore.v1.ExistenceFilter.target_id].
+
+ If different from the count of documents in the client that
+ match, the client must manually determine which documents no
+ longer match the target.
+ """
+
+ target_id = proto.Field(proto.INT32, number=1)
+
+ count = proto.Field(proto.INT32, number=2)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_v1/watch.py b/google/cloud/firestore_v1/watch.py
index 1037322230..466821bb50 100644
--- a/google/cloud/firestore_v1/watch.py
+++ b/google/cloud/firestore_v1/watch.py
@@ -15,20 +15,17 @@
import logging
import collections
import threading
-import datetime
from enum import Enum
import functools
-import pytz
-
-from google.api_core.bidi import ResumableBidiRpc
-from google.api_core.bidi import BackgroundConsumer
-from google.cloud.firestore_v1.proto import firestore_pb2
+from google.api_core.bidi import ResumableBidiRpc # type: ignore
+from google.api_core.bidi import BackgroundConsumer # type: ignore
+from google.cloud.firestore_v1.types import firestore
from google.cloud.firestore_v1 import _helpers
-from google.api_core import exceptions
+from google.api_core import exceptions # type: ignore
-import grpc
+import grpc # type: ignore
"""Python client for Google Cloud Firestore Watch."""
@@ -73,7 +70,7 @@
class WatchDocTree(object):
- # TODO: Currently this uses a dict. Other implementations us an rbtree.
+    # TODO: Currently this uses a dict. Other implementations use an rbtree.
# The performance of this implementation should be investigated and may
# require modifying the underlying datastructure to a rbtree.
def __init__(self):
@@ -221,7 +218,7 @@ def __init__(
ResumableBidiRpc = self.ResumableBidiRpc # FBO unit tests
self._rpc = ResumableBidiRpc(
- self._api.transport.listen,
+ self._api._transport.listen,
should_recover=_should_recover,
should_terminate=_should_terminate,
initial_request=rpc_request,
@@ -261,7 +258,8 @@ def __init__(
def _get_rpc_request(self):
if self.resume_token is not None:
self._targets["resume_token"] = self.resume_token
- return firestore_pb2.ListenRequest(
+
+ return firestore.ListenRequest(
database=self._firestore._database_string, add_target=self._targets
)
@@ -367,14 +365,14 @@ def for_query(
cls, query, snapshot_callback, snapshot_class_instance, reference_class_instance
):
parent_path, _ = query._parent._parent_info()
- query_target = firestore_pb2.Target.QueryTarget(
+ query_target = firestore.Target.QueryTarget(
parent=parent_path, structured_query=query._to_protobuf()
)
return cls(
query,
query._client,
- {"query": query_target, "target_id": WATCH_TARGET_ID},
+ {"query": query_target._pb, "target_id": WATCH_TARGET_ID},
query._comparator,
snapshot_callback,
snapshot_class_instance,
@@ -387,7 +385,8 @@ def _on_snapshot_target_change_no_change(self, proto):
no_target_ids = change.target_ids is None or len(change.target_ids) == 0
if no_target_ids and change.read_time and self.current:
- # TargetChange.CURRENT followed by TargetChange.NO_CHANGE
+ # TargetChange.TargetChangeType.CURRENT followed by
+ # TargetChange.TargetChangeType.NO_CHANGE
# signals a consistent state. Invoke the onSnapshot
# callback as specified by the user.
self.push(change.read_time, change.resume_token)
@@ -431,14 +430,14 @@ def on_snapshot(self, proto):
listen_response(`google.cloud.firestore_v1.types.ListenResponse`):
Callback method that receives a object to
"""
- TargetChange = firestore_pb2.TargetChange
+ TargetChange = firestore.TargetChange
target_changetype_dispatch = {
- TargetChange.NO_CHANGE: self._on_snapshot_target_change_no_change,
- TargetChange.ADD: self._on_snapshot_target_change_add,
- TargetChange.REMOVE: self._on_snapshot_target_change_remove,
- TargetChange.RESET: self._on_snapshot_target_change_reset,
- TargetChange.CURRENT: self._on_snapshot_target_change_current,
+ TargetChange.TargetChangeType.NO_CHANGE: self._on_snapshot_target_change_no_change,
+ TargetChange.TargetChangeType.ADD: self._on_snapshot_target_change_add,
+ TargetChange.TargetChangeType.REMOVE: self._on_snapshot_target_change_remove,
+ TargetChange.TargetChangeType.RESET: self._on_snapshot_target_change_reset,
+ TargetChange.TargetChangeType.CURRENT: self._on_snapshot_target_change_current,
}
target_change = getattr(proto, "target_change", "")
@@ -566,11 +565,7 @@ def push(self, read_time, next_resume_token):
key = functools.cmp_to_key(self._comparator)
keys = sorted(updated_tree.keys(), key=key)
- self._snapshot_callback(
- keys,
- appliedChanges,
- datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc),
- )
+ self._snapshot_callback(keys, appliedChanges, read_time)
self.has_pushed = True
self.doc_tree = updated_tree
diff --git a/google/cloud/firestore_v1beta1/__init__.py b/google/cloud/firestore_v1beta1/__init__.py
deleted file mode 100644
index a1d80278f1..0000000000
--- a/google/cloud/firestore_v1beta1/__init__.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Python idiomatic client for Google Cloud Firestore."""
-
-from pkg_resources import get_distribution
-import warnings
-
-__version__ = get_distribution("google-cloud-firestore").version
-
-from google.cloud.firestore_v1beta1 import types
-from google.cloud.firestore_v1beta1._helpers import GeoPoint
-from google.cloud.firestore_v1beta1._helpers import ExistsOption
-from google.cloud.firestore_v1beta1._helpers import LastUpdateOption
-from google.cloud.firestore_v1beta1._helpers import ReadAfterWriteError
-from google.cloud.firestore_v1beta1._helpers import WriteOption
-from google.cloud.firestore_v1beta1.batch import WriteBatch
-from google.cloud.firestore_v1beta1.client import Client
-from google.cloud.firestore_v1beta1.collection import CollectionReference
-from google.cloud.firestore_v1beta1.transforms import ArrayRemove
-from google.cloud.firestore_v1beta1.transforms import ArrayUnion
-from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD
-from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-from google.cloud.firestore_v1beta1.document import DocumentReference
-from google.cloud.firestore_v1beta1.document import DocumentSnapshot
-from google.cloud.firestore_v1beta1.gapic import enums
-from google.cloud.firestore_v1beta1.query import Query
-from google.cloud.firestore_v1beta1.transaction import Transaction
-from google.cloud.firestore_v1beta1.transaction import transactional
-from google.cloud.firestore_v1beta1.watch import Watch
-
-
-_V1BETA1_DEPRECATED_MESSAGE = (
- "The 'v1beta1' API endpoint is deprecated. "
- "The client/library which supports it will be removed in a future release."
-)
-warnings.warn(_V1BETA1_DEPRECATED_MESSAGE, DeprecationWarning)
-
-
-__all__ = [
- "__version__",
- "ArrayRemove",
- "ArrayUnion",
- "Client",
- "CollectionReference",
- "DELETE_FIELD",
- "DocumentReference",
- "DocumentSnapshot",
- "enums",
- "ExistsOption",
- "GeoPoint",
- "LastUpdateOption",
- "Query",
- "ReadAfterWriteError",
- "SERVER_TIMESTAMP",
- "Transaction",
- "transactional",
- "types",
- "Watch",
- "WriteBatch",
- "WriteOption",
-]
diff --git a/google/cloud/firestore_v1beta1/_helpers.py b/google/cloud/firestore_v1beta1/_helpers.py
deleted file mode 100644
index 11dcefc98f..0000000000
--- a/google/cloud/firestore_v1beta1/_helpers.py
+++ /dev/null
@@ -1,998 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Common helpers shared across Google Cloud Firestore modules."""
-
-import datetime
-
-from google.protobuf import struct_pb2
-from google.type import latlng_pb2
-import grpc
-import six
-
-from google.cloud import exceptions
-from google.cloud._helpers import _datetime_to_pb_timestamp
-from google.api_core.datetime_helpers import DatetimeWithNanoseconds
-from google.cloud.firestore_v1beta1 import transforms
-from google.cloud.firestore_v1beta1 import types
-from google.cloud.firestore_v1beta1.field_path import FieldPath
-from google.cloud.firestore_v1beta1.field_path import parse_field_path
-from google.cloud.firestore_v1beta1.gapic import enums
-from google.cloud.firestore_v1beta1.proto import common_pb2
-from google.cloud.firestore_v1beta1.proto import document_pb2
-from google.cloud.firestore_v1beta1.proto import write_pb2
-
-
-BAD_PATH_TEMPLATE = "A path element must be a string. Received {}, which is a {}."
-DOCUMENT_PATH_DELIMITER = "/"
-INACTIVE_TXN = "Transaction not in progress, cannot be used in API requests."
-READ_AFTER_WRITE_ERROR = "Attempted read after write in a transaction."
-BAD_REFERENCE_ERROR = (
- "Reference value {!r} in unexpected format, expected to be of the form "
- "``projects/{{project}}/databases/{{database}}/"
- "documents/{{document_path}}``."
-)
-WRONG_APP_REFERENCE = (
- "Document {!r} does not correspond to the same database " "({!r}) as the client."
-)
-REQUEST_TIME_ENUM = enums.DocumentTransform.FieldTransform.ServerValue.REQUEST_TIME
-_GRPC_ERROR_MAPPING = {
- grpc.StatusCode.ALREADY_EXISTS: exceptions.Conflict,
- grpc.StatusCode.NOT_FOUND: exceptions.NotFound,
-}
-
-
-class GeoPoint(object):
- """Simple container for a geo point value.
-
- Args:
- latitude (float): Latitude of a point.
- longitude (float): Longitude of a point.
- """
-
- def __init__(self, latitude, longitude):
- self.latitude = latitude
- self.longitude = longitude
-
- def to_protobuf(self):
- """Convert the current object to protobuf.
-
- Returns:
- google.type.latlng_pb2.LatLng: The current point as a protobuf.
- """
- return latlng_pb2.LatLng(latitude=self.latitude, longitude=self.longitude)
-
- def __eq__(self, other):
- """Compare two geo points for equality.
-
- Returns:
- Union[bool, NotImplemented]: :data:`True` if the points compare
- equal, else :data:`False`. (Or :data:`NotImplemented` if
- ``other`` is not a geo point.)
- """
- if not isinstance(other, GeoPoint):
- return NotImplemented
-
- return self.latitude == other.latitude and self.longitude == other.longitude
-
- def __ne__(self, other):
- """Compare two geo points for inequality.
-
- Returns:
- Union[bool, NotImplemented]: :data:`False` if the points compare
- equal, else :data:`True`. (Or :data:`NotImplemented` if
- ``other`` is not a geo point.)
- """
- equality_val = self.__eq__(other)
- if equality_val is NotImplemented:
- return NotImplemented
- else:
- return not equality_val
-
-
-def verify_path(path, is_collection):
- """Verifies that a ``path`` has the correct form.
-
- Checks that all of the elements in ``path`` are strings.
-
- Args:
- path (Tuple[str, ...]): The components in a collection or
- document path.
- is_collection (bool): Indicates if the ``path`` represents
- a document or a collection.
-
- Raises:
- ValueError: if
-
- * the ``path`` is empty
- * ``is_collection=True`` and there are an even number of elements
- * ``is_collection=False`` and there are an odd number of elements
- * an element is not a string
- """
- num_elements = len(path)
- if num_elements == 0:
- raise ValueError("Document or collection path cannot be empty")
-
- if is_collection:
- if num_elements % 2 == 0:
- raise ValueError("A collection must have an odd number of path elements")
- else:
- if num_elements % 2 == 1:
- raise ValueError("A document must have an even number of path elements")
-
- for element in path:
- if not isinstance(element, six.string_types):
- msg = BAD_PATH_TEMPLATE.format(element, type(element))
- raise ValueError(msg)
-
-
-def encode_value(value):
- """Converts a native Python value into a Firestore protobuf ``Value``.
-
- Args:
- value (Union[NoneType, bool, int, float, datetime.datetime, \
- str, bytes, dict, ~google.cloud.Firestore.GeoPoint]): A native
- Python value to convert to a protobuf field.
-
- Returns:
- ~google.cloud.firestore_v1beta1.types.Value: A
- value encoded as a Firestore protobuf.
-
- Raises:
- TypeError: If the ``value`` is not one of the accepted types.
- """
- if value is None:
- return document_pb2.Value(null_value=struct_pb2.NULL_VALUE)
-
- # Must come before six.integer_types since ``bool`` is an integer subtype.
- if isinstance(value, bool):
- return document_pb2.Value(boolean_value=value)
-
- if isinstance(value, six.integer_types):
- return document_pb2.Value(integer_value=value)
-
- if isinstance(value, float):
- return document_pb2.Value(double_value=value)
-
- if isinstance(value, DatetimeWithNanoseconds):
- return document_pb2.Value(timestamp_value=value.timestamp_pb())
-
- if isinstance(value, datetime.datetime):
- return document_pb2.Value(timestamp_value=_datetime_to_pb_timestamp(value))
-
- if isinstance(value, six.text_type):
- return document_pb2.Value(string_value=value)
-
- if isinstance(value, six.binary_type):
- return document_pb2.Value(bytes_value=value)
-
- # NOTE: We avoid doing an isinstance() check for a Document
- # here to avoid import cycles.
- document_path = getattr(value, "_document_path", None)
- if document_path is not None:
- return document_pb2.Value(reference_value=document_path)
-
- if isinstance(value, GeoPoint):
- return document_pb2.Value(geo_point_value=value.to_protobuf())
-
- if isinstance(value, list):
- value_list = [encode_value(element) for element in value]
- value_pb = document_pb2.ArrayValue(values=value_list)
- return document_pb2.Value(array_value=value_pb)
-
- if isinstance(value, dict):
- value_dict = encode_dict(value)
- value_pb = document_pb2.MapValue(fields=value_dict)
- return document_pb2.Value(map_value=value_pb)
-
- raise TypeError(
- "Cannot convert to a Firestore Value", value, "Invalid type", type(value)
- )
-
-
-def encode_dict(values_dict):
- """Encode a dictionary into protobuf ``Value``-s.
-
- Args:
- values_dict (dict): The dictionary to encode as protobuf fields.
-
- Returns:
- Dict[str, ~google.cloud.firestore_v1beta1.types.Value]: A
- dictionary of string keys and ``Value`` protobufs as dictionary
- values.
- """
- return {key: encode_value(value) for key, value in six.iteritems(values_dict)}
-
-
-def reference_value_to_document(reference_value, client):
- """Convert a reference value string to a document.
-
- Args:
- reference_value (str): A document reference value.
- client (~.firestore_v1beta1.client.Client): A client that has
- a document factory.
-
- Returns:
- ~.firestore_v1beta1.document.DocumentReference: The document
- corresponding to ``reference_value``.
-
- Raises:
- ValueError: If the ``reference_value`` is not of the expected
- format: ``projects/{project}/databases/{database}/documents/...``.
- ValueError: If the ``reference_value`` does not come from the same
- project / database combination as the ``client``.
- """
- # The first 5 parts are
- # projects, {project}, databases, {database}, documents
- parts = reference_value.split(DOCUMENT_PATH_DELIMITER, 5)
- if len(parts) != 6:
- msg = BAD_REFERENCE_ERROR.format(reference_value)
- raise ValueError(msg)
-
- # The sixth part is `a/b/c/d` (i.e. the document path)
- document = client.document(parts[-1])
- if document._document_path != reference_value:
- msg = WRONG_APP_REFERENCE.format(reference_value, client._database_string)
- raise ValueError(msg)
-
- return document
-
-
-def decode_value(value, client):
- """Converts a Firestore protobuf ``Value`` to a native Python value.
-
- Args:
- value (google.cloud.firestore_v1beta1.types.Value): A
- Firestore protobuf to be decoded / parsed / converted.
- client (~.firestore_v1beta1.client.Client): A client that has
- a document factory.
-
- Returns:
- Union[NoneType, bool, int, float, datetime.datetime, \
- str, bytes, dict, ~google.cloud.Firestore.GeoPoint]: A native
- Python value converted from the ``value``.
-
- Raises:
- NotImplementedError: If the ``value_type`` is ``reference_value``.
- ValueError: If the ``value_type`` is unknown.
- """
- value_type = value.WhichOneof("value_type")
-
- if value_type == "null_value":
- return None
- elif value_type == "boolean_value":
- return value.boolean_value
- elif value_type == "integer_value":
- return value.integer_value
- elif value_type == "double_value":
- return value.double_value
- elif value_type == "timestamp_value":
- return DatetimeWithNanoseconds.from_timestamp_pb(value.timestamp_value)
- elif value_type == "string_value":
- return value.string_value
- elif value_type == "bytes_value":
- return value.bytes_value
- elif value_type == "reference_value":
- return reference_value_to_document(value.reference_value, client)
- elif value_type == "geo_point_value":
- return GeoPoint(value.geo_point_value.latitude, value.geo_point_value.longitude)
- elif value_type == "array_value":
- return [decode_value(element, client) for element in value.array_value.values]
- elif value_type == "map_value":
- return decode_dict(value.map_value.fields, client)
- else:
- raise ValueError("Unknown ``value_type``", value_type)
-
-
-def decode_dict(value_fields, client):
- """Converts a protobuf map of Firestore ``Value``-s.
-
- Args:
- value_fields (google.protobuf.pyext._message.MessageMapContainer): A
- protobuf map of Firestore ``Value``-s.
- client (~.firestore_v1beta1.client.Client): A client that has
- a document factory.
-
- Returns:
- Dict[str, Union[NoneType, bool, int, float, datetime.datetime, \
- str, bytes, dict, ~google.cloud.Firestore.GeoPoint]]: A dictionary
- of native Python values converted from the ``value_fields``.
- """
- return {
- key: decode_value(value, client) for key, value in six.iteritems(value_fields)
- }
-
-
-def get_doc_id(document_pb, expected_prefix):
- """Parse a document ID from a document protobuf.
-
- Args:
- document_pb (google.cloud.proto.firestore.v1beta1.\
- document_pb2.Document): A protobuf for a document that
- was created in a ``CreateDocument`` RPC.
- expected_prefix (str): The expected collection prefix for the
- fully-qualified document name.
-
- Returns:
- str: The document ID from the protobuf.
-
- Raises:
- ValueError: If the name does not begin with the prefix.
- """
- prefix, document_id = document_pb.name.rsplit(DOCUMENT_PATH_DELIMITER, 1)
- if prefix != expected_prefix:
- raise ValueError(
- "Unexpected document name",
- document_pb.name,
- "Expected to begin with",
- expected_prefix,
- )
-
- return document_id
-
-
-_EmptyDict = transforms.Sentinel("Marker for an empty dict value")
-
-
-def extract_fields(document_data, prefix_path, expand_dots=False):
- """Do depth-first walk of tree, yielding field_path, value"""
- if not document_data:
- yield prefix_path, _EmptyDict
- else:
- for key, value in sorted(six.iteritems(document_data)):
-
- if expand_dots:
- sub_key = FieldPath.from_string(key)
- else:
- sub_key = FieldPath(key)
-
- field_path = FieldPath(*(prefix_path.parts + sub_key.parts))
-
- if isinstance(value, dict):
- for s_path, s_value in extract_fields(value, field_path):
- yield s_path, s_value
- else:
- yield field_path, value
-
-
-def set_field_value(document_data, field_path, value):
- """Set a value into a document for a field_path"""
- current = document_data
- for element in field_path.parts[:-1]:
- current = current.setdefault(element, {})
- if value is _EmptyDict:
- value = {}
- current[field_path.parts[-1]] = value
-
-
-def get_field_value(document_data, field_path):
- if not field_path.parts:
- raise ValueError("Empty path")
-
- current = document_data
- for element in field_path.parts[:-1]:
- current = current[element]
- return current[field_path.parts[-1]]
-
-
-class DocumentExtractor(object):
- """ Break document data up into actual data and transforms.
-
- Handle special values such as ``DELETE_FIELD``, ``SERVER_TIMESTAMP``.
-
- Args:
- document_data (dict):
- Property names and values to use for sending a change to
- a document.
- """
-
- def __init__(self, document_data):
- self.document_data = document_data
- self.field_paths = []
- self.deleted_fields = []
- self.server_timestamps = []
- self.array_removes = {}
- self.array_unions = {}
- self.set_fields = {}
- self.empty_document = False
-
- prefix_path = FieldPath()
- iterator = self._get_document_iterator(prefix_path)
-
- for field_path, value in iterator:
-
- if field_path == prefix_path and value is _EmptyDict:
- self.empty_document = True
-
- elif value is transforms.DELETE_FIELD:
- self.deleted_fields.append(field_path)
-
- elif value is transforms.SERVER_TIMESTAMP:
- self.server_timestamps.append(field_path)
-
- elif isinstance(value, transforms.ArrayRemove):
- self.array_removes[field_path] = value.values
-
- elif isinstance(value, transforms.ArrayUnion):
- self.array_unions[field_path] = value.values
-
- else:
- self.field_paths.append(field_path)
- set_field_value(self.set_fields, field_path, value)
-
- def _get_document_iterator(self, prefix_path):
- return extract_fields(self.document_data, prefix_path)
-
- @property
- def has_transforms(self):
- return bool(self.server_timestamps or self.array_removes or self.array_unions)
-
- @property
- def transform_paths(self):
- return sorted(
- self.server_timestamps + list(self.array_removes) + list(self.array_unions)
- )
-
- def _get_update_mask(self, allow_empty_mask=False):
- return None
-
- def get_update_pb(self, document_path, exists=None, allow_empty_mask=False):
-
- if exists is not None:
- current_document = common_pb2.Precondition(exists=exists)
- else:
- current_document = None
-
- update_pb = write_pb2.Write(
- update=document_pb2.Document(
- name=document_path, fields=encode_dict(self.set_fields)
- ),
- update_mask=self._get_update_mask(allow_empty_mask),
- current_document=current_document,
- )
-
- return update_pb
-
- def get_transform_pb(self, document_path, exists=None):
- def make_array_value(values):
- value_list = [encode_value(element) for element in values]
- return document_pb2.ArrayValue(values=value_list)
-
- path_field_transforms = (
- [
- (
- path,
- write_pb2.DocumentTransform.FieldTransform(
- field_path=path.to_api_repr(),
- set_to_server_value=REQUEST_TIME_ENUM,
- ),
- )
- for path in self.server_timestamps
- ]
- + [
- (
- path,
- write_pb2.DocumentTransform.FieldTransform(
- field_path=path.to_api_repr(),
- remove_all_from_array=make_array_value(values),
- ),
- )
- for path, values in self.array_removes.items()
- ]
- + [
- (
- path,
- write_pb2.DocumentTransform.FieldTransform(
- field_path=path.to_api_repr(),
- append_missing_elements=make_array_value(values),
- ),
- )
- for path, values in self.array_unions.items()
- ]
- )
- field_transforms = [
- transform for path, transform in sorted(path_field_transforms)
- ]
- transform_pb = write_pb2.Write(
- transform=write_pb2.DocumentTransform(
- document=document_path, field_transforms=field_transforms
- )
- )
- if exists is not None:
- transform_pb.current_document.CopyFrom(
- common_pb2.Precondition(exists=exists)
- )
-
- return transform_pb
-
-
-def pbs_for_create(document_path, document_data):
- """Make ``Write`` protobufs for ``create()`` methods.
-
- Args:
- document_path (str): A fully-qualified document path.
- document_data (dict): Property names and values to use for
- creating a document.
-
- Returns:
- List[google.cloud.firestore_v1beta1.types.Write]: One or two
- ``Write`` protobuf instances for ``create()``.
- """
- extractor = DocumentExtractor(document_data)
-
- if extractor.deleted_fields:
- raise ValueError("Cannot apply DELETE_FIELD in a create request.")
-
- write_pbs = []
-
- # Conformance tests require skipping the 'update_pb' if the document
- # contains only transforms.
- if extractor.empty_document or extractor.set_fields:
- write_pbs.append(extractor.get_update_pb(document_path, exists=False))
-
- if extractor.has_transforms:
- exists = None if write_pbs else False
- transform_pb = extractor.get_transform_pb(document_path, exists)
- write_pbs.append(transform_pb)
-
- return write_pbs
-
-
-def pbs_for_set_no_merge(document_path, document_data):
- """Make ``Write`` protobufs for ``set()`` methods.
-
- Args:
- document_path (str): A fully-qualified document path.
- document_data (dict): Property names and values to use for
- replacing a document.
-
- Returns:
- List[google.cloud.firestore_v1beta1.types.Write]: One
- or two ``Write`` protobuf instances for ``set()``.
- """
- extractor = DocumentExtractor(document_data)
-
- if extractor.deleted_fields:
- raise ValueError(
- "Cannot apply DELETE_FIELD in a set request without "
- "specifying 'merge=True' or 'merge=[field_paths]'."
- )
-
- # Conformance tests require send the 'update_pb' even if the document
- # contains only transforms.
- write_pbs = [extractor.get_update_pb(document_path)]
-
- if extractor.has_transforms:
- transform_pb = extractor.get_transform_pb(document_path)
- write_pbs.append(transform_pb)
-
- return write_pbs
-
-
-class DocumentExtractorForMerge(DocumentExtractor):
- """ Break document data up into actual data and transforms.
- """
-
- def __init__(self, document_data):
- super(DocumentExtractorForMerge, self).__init__(document_data)
- self.data_merge = []
- self.transform_merge = []
- self.merge = []
-
- @property
- def has_updates(self):
- # for whatever reason, the conformance tests want to see the parent
- # of nested transform paths in the update mask
- # (see set-st-merge-nonleaf-alone.textproto)
- update_paths = set(self.data_merge)
-
- for transform_path in self.transform_paths:
- if len(transform_path.parts) > 1:
- parent_fp = FieldPath(*transform_path.parts[:-1])
- update_paths.add(parent_fp)
-
- return bool(update_paths)
-
- def _apply_merge_all(self):
- self.data_merge = sorted(self.field_paths + self.deleted_fields)
- # TODO: other transforms
- self.transform_merge = self.transform_paths
- self.merge = sorted(self.data_merge + self.transform_paths)
-
- def _construct_merge_paths(self, merge):
- for merge_field in merge:
- if isinstance(merge_field, FieldPath):
- yield merge_field
- else:
- yield FieldPath(*parse_field_path(merge_field))
-
- def _normalize_merge_paths(self, merge):
- merge_paths = sorted(self._construct_merge_paths(merge))
-
- # Raise if any merge path is a parent of another. Leverage sorting
- # to avoid quadratic behavior.
- for index in range(len(merge_paths) - 1):
- lhs, rhs = merge_paths[index], merge_paths[index + 1]
- if lhs.eq_or_parent(rhs):
- raise ValueError("Merge paths overlap: {}, {}".format(lhs, rhs))
-
- for merge_path in merge_paths:
- if merge_path in self.deleted_fields:
- continue
- try:
- get_field_value(self.document_data, merge_path)
- except KeyError:
- raise ValueError("Invalid merge path: {}".format(merge_path))
-
- return merge_paths
-
- def _apply_merge_paths(self, merge):
-
- if self.empty_document:
- raise ValueError("Cannot merge specific fields with empty document.")
-
- merge_paths = self._normalize_merge_paths(merge)
-
- del self.data_merge[:]
- del self.transform_merge[:]
- self.merge = merge_paths
-
- for merge_path in merge_paths:
-
- if merge_path in self.transform_paths:
- self.transform_merge.append(merge_path)
-
- for field_path in self.field_paths:
- if merge_path.eq_or_parent(field_path):
- self.data_merge.append(field_path)
-
- # Clear out data for fields not merged.
- merged_set_fields = {}
- for field_path in self.data_merge:
- value = get_field_value(self.document_data, field_path)
- set_field_value(merged_set_fields, field_path, value)
- self.set_fields = merged_set_fields
-
- unmerged_deleted_fields = [
- field_path
- for field_path in self.deleted_fields
- if field_path not in self.merge
- ]
- if unmerged_deleted_fields:
- raise ValueError(
- "Cannot delete unmerged fields: {}".format(unmerged_deleted_fields)
- )
- self.data_merge = sorted(self.data_merge + self.deleted_fields)
-
- # Keep only transforms which are within merge.
- merged_transform_paths = set()
- for merge_path in self.merge:
- tranform_merge_paths = [
- transform_path
- for transform_path in self.transform_paths
- if merge_path.eq_or_parent(transform_path)
- ]
- merged_transform_paths.update(tranform_merge_paths)
-
- self.server_timestamps = [
- path for path in self.server_timestamps if path in merged_transform_paths
- ]
-
- self.array_removes = {
- path: values
- for path, values in self.array_removes.items()
- if path in merged_transform_paths
- }
-
- self.array_unions = {
- path: values
- for path, values in self.array_unions.items()
- if path in merged_transform_paths
- }
-
- def apply_merge(self, merge):
- if merge is True: # merge all fields
- self._apply_merge_all()
- else:
- self._apply_merge_paths(merge)
-
- def _get_update_mask(self, allow_empty_mask=False):
- # Mask uses dotted / quoted paths.
- mask_paths = [
- field_path.to_api_repr()
- for field_path in self.merge
- if field_path not in self.transform_merge
- ]
-
- if mask_paths or allow_empty_mask:
- return common_pb2.DocumentMask(field_paths=mask_paths)
-
-
-def pbs_for_set_with_merge(document_path, document_data, merge):
- """Make ``Write`` protobufs for ``set()`` methods.
-
- Args:
- document_path (str): A fully-qualified document path.
- document_data (dict): Property names and values to use for
- replacing a document.
- merge (Optional[bool] or Optional[List]):
- If True, merge all fields; else, merge only the named fields.
-
- Returns:
- List[google.cloud.firestore_v1beta1.types.Write]: One
- or two ``Write`` protobuf instances for ``set()``.
- """
- extractor = DocumentExtractorForMerge(document_data)
- extractor.apply_merge(merge)
-
- merge_empty = not document_data
-
- write_pbs = []
-
- if extractor.has_updates or merge_empty:
- write_pbs.append(
- extractor.get_update_pb(document_path, allow_empty_mask=merge_empty)
- )
-
- if extractor.transform_paths:
- transform_pb = extractor.get_transform_pb(document_path)
- write_pbs.append(transform_pb)
-
- return write_pbs
-
-
-class DocumentExtractorForUpdate(DocumentExtractor):
- """ Break document data up into actual data and transforms.
- """
-
- def __init__(self, document_data):
- super(DocumentExtractorForUpdate, self).__init__(document_data)
- self.top_level_paths = sorted(
- [FieldPath.from_string(key) for key in document_data]
- )
- tops = set(self.top_level_paths)
- for top_level_path in self.top_level_paths:
- for ancestor in top_level_path.lineage():
- if ancestor in tops:
- raise ValueError(
- "Conflicting field path: {}, {}".format(
- top_level_path, ancestor
- )
- )
-
- for field_path in self.deleted_fields:
- if field_path not in tops:
- raise ValueError(
- "Cannot update with nest delete: {}".format(field_path)
- )
-
- def _get_document_iterator(self, prefix_path):
- return extract_fields(self.document_data, prefix_path, expand_dots=True)
-
- def _get_update_mask(self, allow_empty_mask=False):
- mask_paths = []
- for field_path in self.top_level_paths:
- if field_path not in self.transform_paths:
- mask_paths.append(field_path.to_api_repr())
-
- return common_pb2.DocumentMask(field_paths=mask_paths)
-
-
-def pbs_for_update(document_path, field_updates, option):
- """Make ``Write`` protobufs for ``update()`` methods.
-
- Args:
- document_path (str): A fully-qualified document path.
- field_updates (dict): Field names or paths to update and values
- to update with.
- option (optional[~.firestore_v1beta1.client.WriteOption]): A
- write option to make assertions / preconditions on the server
- state of the document before applying changes.
-
- Returns:
- List[google.cloud.firestore_v1beta1.types.Write]: One
- or two ``Write`` protobuf instances for ``update()``.
- """
- extractor = DocumentExtractorForUpdate(field_updates)
-
- if extractor.empty_document:
- raise ValueError("Cannot update with an empty document.")
-
- if option is None: # Default is to use ``exists=True``.
- option = ExistsOption(exists=True)
-
- write_pbs = []
-
- if extractor.field_paths or extractor.deleted_fields:
- update_pb = extractor.get_update_pb(document_path)
- option.modify_write(update_pb)
- write_pbs.append(update_pb)
-
- if extractor.has_transforms:
- transform_pb = extractor.get_transform_pb(document_path)
- if not write_pbs:
- # NOTE: set the write option on the ``transform_pb`` only if there
- # is no ``update_pb``
- option.modify_write(transform_pb)
- write_pbs.append(transform_pb)
-
- return write_pbs
-
-
-def pb_for_delete(document_path, option):
- """Make a ``Write`` protobuf for ``delete()`` methods.
-
- Args:
- document_path (str): A fully-qualified document path.
- option (optional[~.firestore_v1beta1.client.WriteOption]): A
- write option to make assertions / preconditions on the server
- state of the document before applying changes.
-
- Returns:
- google.cloud.firestore_v1beta1.types.Write: A
- ``Write`` protobuf instance for the ``delete()``.
- """
- write_pb = write_pb2.Write(delete=document_path)
- if option is not None:
- option.modify_write(write_pb)
-
- return write_pb
-
-
-class ReadAfterWriteError(Exception):
- """Raised when a read is attempted after a write.
-
- Raised by "read" methods that use transactions.
- """
-
-
-def get_transaction_id(transaction, read_operation=True):
- """Get the transaction ID from a ``Transaction`` object.
-
- Args:
- transaction (Optional[~.firestore_v1beta1.transaction.\
- Transaction]): An existing transaction that this query will
- run in.
- read_operation (Optional[bool]): Indicates if the transaction ID
- will be used in a read operation. Defaults to :data:`True`.
-
- Returns:
- Optional[bytes]: The ID of the transaction, or :data:`None` if the
- ``transaction`` is :data:`None`.
-
- Raises:
- ValueError: If the ``transaction`` is not in progress (only if
- ``transaction`` is not :data:`None`).
- ReadAfterWriteError: If the ``transaction`` has writes stored on
- it and ``read_operation`` is :data:`True`.
- """
- if transaction is None:
- return None
- else:
- if not transaction.in_progress:
- raise ValueError(INACTIVE_TXN)
- if read_operation and len(transaction._write_pbs) > 0:
- raise ReadAfterWriteError(READ_AFTER_WRITE_ERROR)
- return transaction.id
-
-
-def metadata_with_prefix(prefix, **kw):
- """Create RPC metadata containing a prefix.
-
- Args:
- prefix (str): appropriate resource path.
-
- Returns:
- List[Tuple[str, str]]: RPC metadata with supplied prefix
- """
- return [("google-cloud-resource-prefix", prefix)]
-
-
-class WriteOption(object):
- """Option used to assert a condition on a write operation."""
-
- def modify_write(self, write_pb, no_create_msg=None):
- """Modify a ``Write`` protobuf based on the state of this write option.
-
- This is a virtual method intended to be implemented by subclasses.
-
- Args:
- write_pb (google.cloud.firestore_v1beta1.types.Write): A
- ``Write`` protobuf instance to be modified with a precondition
- determined by the state of this option.
- no_create_msg (Optional[str]): A message to use to indicate that
- a create operation is not allowed.
-
- Raises:
- NotImplementedError: Always, this method is virtual.
- """
- raise NotImplementedError
-
-
-class LastUpdateOption(WriteOption):
- """Option used to assert a "last update" condition on a write operation.
-
- This will typically be created by
- :meth:`~google.cloud.firestore_v1beta1.client.Client.write_option`.
-
- Args:
- last_update_time (google.protobuf.timestamp_pb2.Timestamp): A
- timestamp. When set, the target document must exist and have
- been last updated at that time. Protobuf ``update_time`` timestamps
- are typically returned from methods that perform write operations
- as part of a "write result" protobuf or directly.
- """
-
- def __init__(self, last_update_time):
- self._last_update_time = last_update_time
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return NotImplemented
- return self._last_update_time == other._last_update_time
-
- def modify_write(self, write_pb, **unused_kwargs):
- """Modify a ``Write`` protobuf based on the state of this write option.
-
- The ``last_update_time`` is added to ``write_pb`` as an "update time"
- precondition. When set, the target document must exist and have been
- last updated at that time.
-
- Args:
- write_pb (google.cloud.firestore_v1beta1.types.Write): A
- ``Write`` protobuf instance to be modified with a precondition
- determined by the state of this option.
- unused_kwargs (Dict[str, Any]): Keyword arguments accepted by
- other subclasses that are unused here.
- """
- current_doc = types.Precondition(update_time=self._last_update_time)
- write_pb.current_document.CopyFrom(current_doc)
-
-
-class ExistsOption(WriteOption):
- """Option used to assert existence on a write operation.
-
- This will typically be created by
- :meth:`~google.cloud.firestore_v1beta1.client.Client.write_option`.
-
- Args:
- exists (bool): Indicates if the document being modified
- should already exist.
- """
-
- def __init__(self, exists):
- self._exists = exists
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return NotImplemented
- return self._exists == other._exists
-
- def modify_write(self, write_pb, **unused_kwargs):
- """Modify a ``Write`` protobuf based on the state of this write option.
-
- If:
-
- * ``exists=True``, adds a precondition that requires existence
- * ``exists=False``, adds a precondition that requires non-existence
-
- Args:
- write_pb (google.cloud.firestore_v1beta1.types.Write): A
- ``Write`` protobuf instance to be modified with a precondition
- determined by the state of this option.
- unused_kwargs (Dict[str, Any]): Keyword arguments accepted by
- other subclasses that are unused here.
- """
- current_doc = types.Precondition(exists=self._exists)
- write_pb.current_document.CopyFrom(current_doc)
diff --git a/google/cloud/firestore_v1beta1/collection.py b/google/cloud/firestore_v1beta1/collection.py
deleted file mode 100644
index 45b1ddae03..0000000000
--- a/google/cloud/firestore_v1beta1/collection.py
+++ /dev/null
@@ -1,478 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Classes for representing collections for the Google Cloud Firestore API."""
-import random
-import warnings
-
-import six
-
-from google.cloud.firestore_v1beta1 import _helpers
-from google.cloud.firestore_v1beta1 import query as query_mod
-from google.cloud.firestore_v1beta1.proto import document_pb2
-from google.cloud.firestore_v1beta1.watch import Watch
-from google.cloud.firestore_v1beta1 import document
-
-_AUTO_ID_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
-
-
-class CollectionReference(object):
- """A reference to a collection in a Firestore database.
-
- The collection may already exist or this class can facilitate creation
- of documents within the collection.
-
- Args:
- path (Tuple[str, ...]): The components in the collection path.
- This is a series of strings representing each collection and
- sub-collection ID, as well as the document IDs for any documents
- that contain a sub-collection.
- kwargs (dict): The keyword arguments for the constructor. The only
- supported keyword is ``client`` and it must be a
- :class:`~google.cloud.firestore_v1beta1.client.Client` if
- provided. It represents the client that created this collection
- reference.
-
- Raises:
- ValueError: if
-
- * the ``path`` is empty
- * there are an even number of elements
- * a collection ID in ``path`` is not a string
- * a document ID in ``path`` is not a string
- TypeError: If a keyword other than ``client`` is used.
- """
-
- def __init__(self, *path, **kwargs):
- _helpers.verify_path(path, is_collection=True)
- self._path = path
- self._client = kwargs.pop("client", None)
- if kwargs:
- raise TypeError(
- "Received unexpected arguments", kwargs, "Only `client` is supported"
- )
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return NotImplemented
- return self._path == other._path and self._client == other._client
-
- @property
- def id(self):
- """The collection identifier.
-
- Returns:
- str: The last component of the path.
- """
- return self._path[-1]
-
- @property
- def parent(self):
- """Document that owns the current collection.
-
- Returns:
- Optional[~.firestore_v1beta1.document.DocumentReference]: The
- parent document, if the current collection is not a
- top-level collection.
- """
- if len(self._path) == 1:
- return None
- else:
- parent_path = self._path[:-1]
- return self._client.document(*parent_path)
-
- def document(self, document_id=None):
- """Create a sub-document underneath the current collection.
-
- Args:
- document_id (Optional[str]): The document identifier
- within the current collection. If not provided, will default
- to a random 20 character string composed of digits,
- uppercase and lowercase and letters.
-
- Returns:
- ~.firestore_v1beta1.document.DocumentReference: The child
- document.
- """
- if document_id is None:
- document_id = _auto_id()
-
- child_path = self._path + (document_id,)
- return self._client.document(*child_path)
-
- def _parent_info(self):
- """Get fully-qualified parent path and prefix for this collection.
-
- Returns:
- Tuple[str, str]: Pair of
-
- * the fully-qualified (with database and project) path to the
- parent of this collection (will either be the database path
- or a document path).
- * the prefix to a document in this collection.
- """
- parent_doc = self.parent
- if parent_doc is None:
- parent_path = _helpers.DOCUMENT_PATH_DELIMITER.join(
- (self._client._database_string, "documents")
- )
- else:
- parent_path = parent_doc._document_path
-
- expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join((parent_path, self.id))
- return parent_path, expected_prefix
-
- def add(self, document_data, document_id=None):
- """Create a document in the Firestore database with the provided data.
-
- Args:
- document_data (dict): Property names and values to use for
- creating the document.
- document_id (Optional[str]): The document identifier within the
- current collection. If not provided, an ID will be
- automatically assigned by the server (the assigned ID will be
- a random 20 character string composed of digits,
- uppercase and lowercase letters).
-
- Returns:
- Tuple[google.protobuf.timestamp_pb2.Timestamp, \
- ~.firestore_v1beta1.document.DocumentReference]: Pair of
-
- * The ``update_time`` when the document was created (or
- overwritten).
- * A document reference for the created document.
-
- Raises:
- ~google.cloud.exceptions.Conflict: If ``document_id`` is provided
- and the document already exists.
- """
- if document_id is None:
- parent_path, expected_prefix = self._parent_info()
-
- document_pb = document_pb2.Document()
-
- created_document_pb = self._client._firestore_api.create_document(
- parent_path,
- collection_id=self.id,
- document_id=None,
- document=document_pb,
- mask=None,
- metadata=self._client._rpc_metadata,
- )
-
- new_document_id = _helpers.get_doc_id(created_document_pb, expected_prefix)
- document_ref = self.document(new_document_id)
- set_result = document_ref.set(document_data)
- return set_result.update_time, document_ref
- else:
- document_ref = self.document(document_id)
- write_result = document_ref.create(document_data)
- return write_result.update_time, document_ref
-
- def list_documents(self, page_size=None):
- """List all subdocuments of the current collection.
-
- Args:
- page_size (Optional[int]]): The maximum number of documents
- in each page of results from this request. Non-positive values
- are ignored. Defaults to a sensible value set by the API.
-
- Returns:
- Sequence[~.firestore_v1beta1.collection.DocumentReference]:
- iterator of subdocuments of the current collection. If the
- collection does not exist at the time of `snapshot`, the
- iterator will be empty
- """
- parent, _ = self._parent_info()
-
- iterator = self._client._firestore_api.list_documents(
- parent,
- self.id,
- page_size=page_size,
- show_missing=True,
- metadata=self._client._rpc_metadata,
- )
- iterator.collection = self
- iterator.item_to_value = _item_to_document_ref
- return iterator
-
- def select(self, field_paths):
- """Create a "select" query with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1beta1.query.Query.select` for
- more information on this method.
-
- Args:
- field_paths (Iterable[str, ...]): An iterable of field paths
- (``.``-delimited list of field names) to use as a projection
- of document fields in the query results.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A "projected" query.
- """
- query = query_mod.Query(self)
- return query.select(field_paths)
-
- def where(self, field_path, op_string, value):
- """Create a "where" query with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1beta1.query.Query.where` for
- more information on this method.
-
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names) for the field to filter on.
- op_string (str): A comparison operation in the form of a string.
- Acceptable values are ``<``, ``<=``, ``==``, ``>=``
- and ``>``.
- value (Any): The value to compare the field against in the filter.
- If ``value`` is :data:`None` or a NaN, then ``==`` is the only
- allowed operation.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A filtered query.
- """
- query = query_mod.Query(self)
- return query.where(field_path, op_string, value)
-
- def order_by(self, field_path, **kwargs):
- """Create an "order by" query with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by` for
- more information on this method.
-
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names) on which to order the query results.
- kwargs (Dict[str, Any]): The keyword arguments to pass along
- to the query. The only supported keyword is ``direction``, see
- :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`
- for more information.
-
- Returns:
- ~.firestore_v1beta1.query.Query: An "order by" query.
- """
- query = query_mod.Query(self)
- return query.order_by(field_path, **kwargs)
-
- def limit(self, count):
- """Create a limited query with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1beta1.query.Query.limit` for
- more information on this method.
-
- Args:
- count (int): Maximum number of documents to return that match
- the query.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A limited query.
- """
- query = query_mod.Query(self)
- return query.limit(count)
-
- def offset(self, num_to_skip):
- """Skip to an offset in a query with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1beta1.query.Query.offset` for
- more information on this method.
-
- Args:
- num_to_skip (int): The number of results to skip at the beginning
- of query results. (Must be non-negative.)
-
- Returns:
- ~.firestore_v1beta1.query.Query: An offset query.
- """
- query = query_mod.Query(self)
- return query.offset(num_to_skip)
-
- def start_at(self, document_fields):
- """Start query at a cursor with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1beta1.query.Query.start_at` for
- more information on this method.
-
- Args:
- document_fields (Union[~.firestore_v1beta1.\
- document.DocumentSnapshot, dict, list, tuple]): a document
- snapshot or a dictionary/list/tuple of fields representing a
- query results cursor. A cursor is a collection of values that
- represent a position in a query result set.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A query with cursor.
- """
- query = query_mod.Query(self)
- return query.start_at(document_fields)
-
- def start_after(self, document_fields):
- """Start query after a cursor with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1beta1.query.Query.start_after` for
- more information on this method.
-
- Args:
- document_fields (Union[~.firestore_v1beta1.\
- document.DocumentSnapshot, dict, list, tuple]): a document
- snapshot or a dictionary/list/tuple of fields representing a
- query results cursor. A cursor is a collection of values that
- represent a position in a query result set.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A query with cursor.
- """
- query = query_mod.Query(self)
- return query.start_after(document_fields)
-
- def end_before(self, document_fields):
- """End query before a cursor with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1beta1.query.Query.end_before` for
- more information on this method.
-
- Args:
- document_fields (Union[~.firestore_v1beta1.\
- document.DocumentSnapshot, dict, list, tuple]): a document
- snapshot or a dictionary/list/tuple of fields representing a
- query results cursor. A cursor is a collection of values that
- represent a position in a query result set.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A query with cursor.
- """
- query = query_mod.Query(self)
- return query.end_before(document_fields)
-
- def end_at(self, document_fields):
- """End query at a cursor with this collection as parent.
-
- See
- :meth:`~google.cloud.firestore_v1beta1.query.Query.end_at` for
- more information on this method.
-
- Args:
- document_fields (Union[~.firestore_v1beta1.\
- document.DocumentSnapshot, dict, list, tuple]): a document
- snapshot or a dictionary/list/tuple of fields representing a
- query results cursor. A cursor is a collection of values that
- represent a position in a query result set.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A query with cursor.
- """
- query = query_mod.Query(self)
- return query.end_at(document_fields)
-
- def get(self, transaction=None):
- """Deprecated alias for :meth:`stream`."""
- warnings.warn(
- "'Collection.get' is deprecated: please use 'Collection.stream' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return self.stream(transaction=transaction)
-
- def stream(self, transaction=None):
- """Read the documents in this collection.
-
- This sends a ``RunQuery`` RPC and then returns an iterator which
- consumes each document returned in the stream of ``RunQueryResponse``
- messages.
-
- .. note::
-
- The underlying stream of responses will time out after
- the ``max_rpc_timeout_millis`` value set in the GAPIC
- client configuration for the ``RunQuery`` API. Snapshots
- not consumed from the iterator before that point will be lost.
-
- If a ``transaction`` is used and it already has write operations
- added, this method cannot be used (i.e. read-after-write is not
- allowed).
-
- Args:
- transaction (Optional[~.firestore_v1beta1.transaction.\
- Transaction]): An existing transaction that the query will
- run in.
-
- Yields:
- ~.firestore_v1beta1.document.DocumentSnapshot: The next
- document that fulfills the query.
- """
- query = query_mod.Query(self)
- return query.stream(transaction=transaction)
-
- def on_snapshot(self, callback):
- """Monitor the documents in this collection.
-
- This starts a watch on this collection using a background thread. The
- provided callback is run on the snapshot of the documents.
-
- Args:
- callback(~.firestore.collection.CollectionSnapshot): a callback
- to run when a change occurs.
-
- Example:
- from google.cloud import firestore_v1beta1
-
- db = firestore_v1beta1.Client()
- collection_ref = db.collection(u'users')
-
- def on_snapshot(collection_snapshot):
- for doc in collection_snapshot.documents:
- print(u'{} => {}'.format(doc.id, doc.to_dict()))
-
- # Watch this collection
- collection_watch = collection_ref.on_snapshot(on_snapshot)
-
- # Terminate this watch
- collection_watch.unsubscribe()
- """
- return Watch.for_query(
- query_mod.Query(self),
- callback,
- document.DocumentSnapshot,
- document.DocumentReference,
- )
-
-
-def _auto_id():
- """Generate a "random" automatically generated ID.
-
- Returns:
- str: A 20 character string composed of digits, uppercase and
- lowercase and letters.
- """
- return "".join(random.choice(_AUTO_ID_CHARS) for _ in six.moves.xrange(20))
-
-
-def _item_to_document_ref(iterator, item):
- """Convert Document resource to document ref.
-
- Args:
- iterator (google.api_core.page_iterator.GRPCIterator):
- iterator response
- item (dict): document resource
- """
- document_id = item.name.split(_helpers.DOCUMENT_PATH_DELIMITER)[-1]
- return iterator.collection.document(document_id)
diff --git a/google/cloud/firestore_v1beta1/document.py b/google/cloud/firestore_v1beta1/document.py
deleted file mode 100644
index 8efd452556..0000000000
--- a/google/cloud/firestore_v1beta1/document.py
+++ /dev/null
@@ -1,780 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Classes for representing documents for the Google Cloud Firestore API."""
-
-import copy
-
-import six
-
-from google.api_core import exceptions
-from google.cloud.firestore_v1beta1 import _helpers
-from google.cloud.firestore_v1beta1 import field_path as field_path_module
-from google.cloud.firestore_v1beta1.proto import common_pb2
-from google.cloud.firestore_v1beta1.watch import Watch
-
-
-class DocumentReference(object):
- """A reference to a document in a Firestore database.
-
- The document may already exist or can be created by this class.
-
- Args:
- path (Tuple[str, ...]): The components in the document path.
- This is a series of strings representing each collection and
- sub-collection ID, as well as the document IDs for any documents
- that contain a sub-collection (as well as the base document).
- kwargs (dict): The keyword arguments for the constructor. The only
- supported keyword is ``client`` and it must be a
- :class:`~google.cloud.firestore_v1beta1.client.Client`.
- It represents the client that created this document reference.
-
- Raises:
- ValueError: if
-
- * the ``path`` is empty
- * there are an even number of elements
- * a collection ID in ``path`` is not a string
- * a document ID in ``path`` is not a string
- TypeError: If a keyword other than ``client`` is used.
- """
-
- _document_path_internal = None
-
- def __init__(self, *path, **kwargs):
- _helpers.verify_path(path, is_collection=False)
- self._path = path
- self._client = kwargs.pop("client", None)
- if kwargs:
- raise TypeError(
- "Received unexpected arguments", kwargs, "Only `client` is supported"
- )
-
- def __copy__(self):
- """Shallow copy the instance.
-
- We leave the client "as-is" but tuple-unpack the path.
-
- Returns:
- .DocumentReference: A copy of the current document.
- """
- result = self.__class__(*self._path, client=self._client)
- result._document_path_internal = self._document_path_internal
- return result
-
- def __deepcopy__(self, unused_memo):
- """Deep copy the instance.
-
- This isn't a true deep copy, wee leave the client "as-is" but
- tuple-unpack the path.
-
- Returns:
- .DocumentReference: A copy of the current document.
- """
- return self.__copy__()
-
- def __eq__(self, other):
- """Equality check against another instance.
-
- Args:
- other (Any): A value to compare against.
-
- Returns:
- Union[bool, NotImplementedType]: Indicating if the values are
- equal.
- """
- if isinstance(other, DocumentReference):
- return self._client == other._client and self._path == other._path
- else:
- return NotImplemented
-
- def __hash__(self):
- return hash(self._path) + hash(self._client)
-
- def __ne__(self, other):
- """Inequality check against another instance.
-
- Args:
- other (Any): A value to compare against.
-
- Returns:
- Union[bool, NotImplementedType]: Indicating if the values are
- not equal.
- """
- if isinstance(other, DocumentReference):
- return self._client != other._client or self._path != other._path
- else:
- return NotImplemented
-
- @property
- def path(self):
- """Database-relative for this document.
-
- Returns:
- str: The document's relative path.
- """
- return "/".join(self._path)
-
- @property
- def _document_path(self):
- """Create and cache the full path for this document.
-
- Of the form:
-
- ``projects/{project_id}/databases/{database_id}/...
- documents/{document_path}``
-
- Returns:
- str: The full document path.
-
- Raises:
- ValueError: If the current document reference has no ``client``.
- """
- if self._document_path_internal is None:
- if self._client is None:
- raise ValueError("A document reference requires a `client`.")
- self._document_path_internal = _get_document_path(self._client, self._path)
-
- return self._document_path_internal
-
- @property
- def id(self):
- """The document identifier (within its collection).
-
- Returns:
- str: The last component of the path.
- """
- return self._path[-1]
-
- @property
- def parent(self):
- """Collection that owns the current document.
-
- Returns:
- ~.firestore_v1beta1.collection.CollectionReference: The
- parent collection.
- """
- parent_path = self._path[:-1]
- return self._client.collection(*parent_path)
-
- def collection(self, collection_id):
- """Create a sub-collection underneath the current document.
-
- Args:
- collection_id (str): The sub-collection identifier (sometimes
- referred to as the "kind").
-
- Returns:
- ~.firestore_v1beta1.collection.CollectionReference: The
- child collection.
- """
- child_path = self._path + (collection_id,)
- return self._client.collection(*child_path)
-
- def create(self, document_data):
- """Create the current document in the Firestore database.
-
- Args:
- document_data (dict): Property names and values to use for
- creating a document.
-
- Returns:
- google.cloud.firestore_v1beta1.types.WriteResult: The
- write result corresponding to the committed document. A write
- result contains an ``update_time`` field.
-
- Raises:
- ~google.cloud.exceptions.Conflict: If the document already exists.
- """
- batch = self._client.batch()
- batch.create(self, document_data)
- write_results = batch.commit()
- return _first_write_result(write_results)
-
- def set(self, document_data, merge=False):
- """Replace the current document in the Firestore database.
-
- A write ``option`` can be specified to indicate preconditions of
- the "set" operation. If no ``option`` is specified and this document
- doesn't exist yet, this method will create it.
-
- Overwrites all content for the document with the fields in
- ``document_data``. This method performs almost the same functionality
- as :meth:`create`. The only difference is that this method doesn't
- make any requirements on the existence of the document (unless
- ``option`` is used), whereas as :meth:`create` will fail if the
- document already exists.
-
- Args:
- document_data (dict): Property names and values to use for
- replacing a document.
- merge (Optional[bool] or Optional[List]):
- If True, apply merging instead of overwriting the state
- of the document.
-
- Returns:
- google.cloud.firestore_v1beta1.types.WriteResult: The
- write result corresponding to the committed document. A write
- result contains an ``update_time`` field.
- """
- batch = self._client.batch()
- batch.set(self, document_data, merge=merge)
- write_results = batch.commit()
- return _first_write_result(write_results)
-
- def update(self, field_updates, option=None):
- """Update an existing document in the Firestore database.
-
- By default, this method verifies that the document exists on the
- server before making updates. A write ``option`` can be specified to
- override these preconditions.
-
- Each key in ``field_updates`` can either be a field name or a
- **field path** (For more information on **field paths**, see
- :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`.) To
- illustrate this, consider a document with
-
- .. code-block:: python
-
- >>> snapshot = document.get()
- >>> snapshot.to_dict()
- {
- 'foo': {
- 'bar': 'baz',
- },
- 'other': True,
- }
-
- stored on the server. If the field name is used in the update:
-
- .. code-block:: python
-
- >>> field_updates = {
- ... 'foo': {
- ... 'quux': 800,
- ... },
- ... }
- >>> document.update(field_updates)
-
- then all of ``foo`` will be overwritten on the server and the new
- value will be
-
- .. code-block:: python
-
- >>> snapshot = document.get()
- >>> snapshot.to_dict()
- {
- 'foo': {
- 'quux': 800,
- },
- 'other': True,
- }
-
- On the other hand, if a ``.``-delimited **field path** is used in the
- update:
-
- .. code-block:: python
-
- >>> field_updates = {
- ... 'foo.quux': 800,
- ... }
- >>> document.update(field_updates)
-
- then only ``foo.quux`` will be updated on the server and the
- field ``foo.bar`` will remain intact:
-
- .. code-block:: python
-
- >>> snapshot = document.get()
- >>> snapshot.to_dict()
- {
- 'foo': {
- 'bar': 'baz',
- 'quux': 800,
- },
- 'other': True,
- }
-
- .. warning::
-
- A **field path** can only be used as a top-level key in
- ``field_updates``.
-
- To delete / remove a field from an existing document, use the
- :attr:`~google.cloud.firestore_v1beta1.transforms.DELETE_FIELD`
- sentinel. So with the example above, sending
-
- .. code-block:: python
-
- >>> field_updates = {
- ... 'other': firestore.DELETE_FIELD,
- ... }
- >>> document.update(field_updates)
-
- would update the value on the server to:
-
- .. code-block:: python
-
- >>> snapshot = document.get()
- >>> snapshot.to_dict()
- {
- 'foo': {
- 'bar': 'baz',
- },
- }
-
- To set a field to the current time on the server when the
- update is received, use the
- :attr:`~google.cloud.firestore_v1beta1.transforms.SERVER_TIMESTAMP`
- sentinel. Sending
-
- .. code-block:: python
-
- >>> field_updates = {
- ... 'foo.now': firestore.SERVER_TIMESTAMP,
- ... }
- >>> document.update(field_updates)
-
- would update the value on the server to:
-
- .. code-block:: python
-
- >>> snapshot = document.get()
- >>> snapshot.to_dict()
- {
- 'foo': {
- 'bar': 'baz',
- 'now': datetime.datetime(2012, ...),
- },
- 'other': True,
- }
-
- Args:
- field_updates (dict): Field names or paths to update and values
- to update with.
- option (Optional[~.firestore_v1beta1.client.WriteOption]): A
- write option to make assertions / preconditions on the server
- state of the document before applying changes.
-
- Returns:
- google.cloud.firestore_v1beta1.types.WriteResult: The
- write result corresponding to the updated document. A write
- result contains an ``update_time`` field.
-
- Raises:
- ~google.cloud.exceptions.NotFound: If the document does not exist.
- """
- batch = self._client.batch()
- batch.update(self, field_updates, option=option)
- write_results = batch.commit()
- return _first_write_result(write_results)
-
- def delete(self, option=None):
- """Delete the current document in the Firestore database.
-
- Args:
- option (Optional[~.firestore_v1beta1.client.WriteOption]): A
- write option to make assertions / preconditions on the server
- state of the document before applying changes.
-
- Returns:
- google.protobuf.timestamp_pb2.Timestamp: The time that the delete
- request was received by the server. If the document did not exist
- when the delete was sent (i.e. nothing was deleted), this method
- will still succeed and will still return the time that the
- request was received by the server.
- """
- write_pb = _helpers.pb_for_delete(self._document_path, option)
- commit_response = self._client._firestore_api.commit(
- self._client._database_string,
- [write_pb],
- transaction=None,
- metadata=self._client._rpc_metadata,
- )
-
- return commit_response.commit_time
-
- def get(self, field_paths=None, transaction=None):
- """Retrieve a snapshot of the current document.
-
- See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
- for more information on **field paths**.
-
- If a ``transaction`` is used and it already has write operations
- added, this method cannot be used (i.e. read-after-write is not
- allowed).
-
- Args:
- field_paths (Optional[Iterable[str, ...]]): An iterable of field
- paths (``.``-delimited list of field names) to use as a
- projection of document fields in the returned results. If
- no value is provided, all fields will be returned.
- transaction (Optional[~.firestore_v1beta1.transaction.\
- Transaction]): An existing transaction that this reference
- will be retrieved in.
-
- Returns:
- ~.firestore_v1beta1.document.DocumentSnapshot: A snapshot of
- the current document. If the document does not exist at
- the time of `snapshot`, the snapshot `reference`, `data`,
- `update_time`, and `create_time` attributes will all be
- `None` and `exists` will be `False`.
- """
- if isinstance(field_paths, six.string_types):
- raise ValueError("'field_paths' must be a sequence of paths, not a string.")
-
- if field_paths is not None:
- mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
- else:
- mask = None
-
- firestore_api = self._client._firestore_api
- try:
- document_pb = firestore_api.get_document(
- self._document_path,
- mask=mask,
- transaction=_helpers.get_transaction_id(transaction),
- metadata=self._client._rpc_metadata,
- )
- except exceptions.NotFound:
- data = None
- exists = False
- create_time = None
- update_time = None
- else:
- data = _helpers.decode_dict(document_pb.fields, self._client)
- exists = True
- create_time = document_pb.create_time
- update_time = document_pb.update_time
-
- return DocumentSnapshot(
- reference=self,
- data=data,
- exists=exists,
- read_time=None, # No server read_time available
- create_time=create_time,
- update_time=update_time,
- )
-
- def collections(self, page_size=None):
- """List subcollections of the current document.
-
- Args:
- page_size (Optional[int]]): The maximum number of collections
- in each page of results from this request. Non-positive values
- are ignored. Defaults to a sensible value set by the API.
-
- Returns:
- Sequence[~.firestore_v1beta1.collection.CollectionReference]:
- iterator of subcollections of the current document. If the
- document does not exist at the time of `snapshot`, the
- iterator will be empty
- """
- iterator = self._client._firestore_api.list_collection_ids(
- self._document_path,
- page_size=page_size,
- metadata=self._client._rpc_metadata,
- )
- iterator.document = self
- iterator.item_to_value = _item_to_collection_ref
- return iterator
-
- def on_snapshot(self, callback):
- """Watch this document.
-
- This starts a watch on this document using a background thread. The
- provided callback is run on the snapshot.
-
- Args:
- callback(~.firestore.document.DocumentSnapshot):a callback to run
- when a change occurs
-
- Example:
- from google.cloud import firestore_v1beta1
-
- db = firestore_v1beta1.Client()
- collection_ref = db.collection(u'users')
-
- def on_snapshot(document_snapshot):
- doc = document_snapshot
- print(u'{} => {}'.format(doc.id, doc.to_dict()))
-
- doc_ref = db.collection(u'users').document(
- u'alovelace' + unique_resource_id())
-
- # Watch this document
- doc_watch = doc_ref.on_snapshot(on_snapshot)
-
- # Terminate this watch
- doc_watch.unsubscribe()
- """
- return Watch.for_document(self, callback, DocumentSnapshot, DocumentReference)
-
-
-class DocumentSnapshot(object):
- """A snapshot of document data in a Firestore database.
-
- This represents data retrieved at a specific time and may not contain
- all fields stored for the document (i.e. a hand-picked selection of
- fields may have been retrieved).
-
- Instances of this class are not intended to be constructed by hand,
- rather they'll be returned as responses to various methods, such as
- :meth:`~google.cloud.DocumentReference.get`.
-
- Args:
- reference (~.firestore_v1beta1.document.DocumentReference): A
- document reference corresponding to the document that contains
- the data in this snapshot.
- data (Dict[str, Any]): The data retrieved in the snapshot.
- exists (bool): Indicates if the document existed at the time the
- snapshot was retrieved.
- read_time (google.protobuf.timestamp_pb2.Timestamp): The time that
- this snapshot was read from the server.
- create_time (google.protobuf.timestamp_pb2.Timestamp): The time that
- this document was created.
- update_time (google.protobuf.timestamp_pb2.Timestamp): The time that
- this document was last updated.
- """
-
- def __init__(self, reference, data, exists, read_time, create_time, update_time):
- self._reference = reference
- # We want immutable data, so callers can't modify this value
- # out from under us.
- self._data = copy.deepcopy(data)
- self._exists = exists
- self.read_time = read_time
- """google.protobuf.timestamp_pb2.Timestamp: Time snapshot was read."""
- self.create_time = create_time
- """google.protobuf.timestamp_pb2.Timestamp: Document's creation."""
- self.update_time = update_time
- """google.protobuf.timestamp_pb2.Timestamp: Document's last update."""
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return NotImplemented
- return self._reference == other._reference and self._data == other._data
-
- def __hash__(self):
- seconds = self.update_time.seconds
- nanos = self.update_time.nanos
- return hash(self._reference) + hash(seconds) + hash(nanos)
-
- @property
- def _client(self):
- """The client that owns the document reference for this snapshot.
-
- Returns:
- ~.firestore_v1beta1.client.Client: The client that owns this
- document.
- """
- return self._reference._client
-
- @property
- def exists(self):
- """Existence flag.
-
- Indicates if the document existed at the time this snapshot
- was retrieved.
-
- Returns:
- bool: The existence flag.
- """
- return self._exists
-
- @property
- def id(self):
- """The document identifier (within its collection).
-
- Returns:
- str: The last component of the path of the document.
- """
- return self._reference.id
-
- @property
- def reference(self):
- """Document reference corresponding to document that owns this data.
-
- Returns:
- ~.firestore_v1beta1.document.DocumentReference: A document
- reference corresponding to this document.
- """
- return self._reference
-
- def get(self, field_path):
- """Get a value from the snapshot data.
-
- If the data is nested, for example:
-
- .. code-block:: python
-
- >>> snapshot.to_dict()
- {
- 'top1': {
- 'middle2': {
- 'bottom3': 20,
- 'bottom4': 22,
- },
- 'middle5': True,
- },
- 'top6': b'\x00\x01 foo',
- }
-
- a **field path** can be used to access the nested data. For
- example:
-
- .. code-block:: python
-
- >>> snapshot.get('top1')
- {
- 'middle2': {
- 'bottom3': 20,
- 'bottom4': 22,
- },
- 'middle5': True,
- }
- >>> snapshot.get('top1.middle2')
- {
- 'bottom3': 20,
- 'bottom4': 22,
- }
- >>> snapshot.get('top1.middle2.bottom3')
- 20
-
- See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
- for more information on **field paths**.
-
- A copy is returned since the data may contain mutable values,
- but the data stored in the snapshot must remain immutable.
-
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names).
-
- Returns:
- Any or None:
- (A copy of) the value stored for the ``field_path`` or
- None if snapshot document does not exist.
-
- Raises:
- KeyError: If the ``field_path`` does not match nested data
- in the snapshot.
- """
- if not self._exists:
- return None
- nested_data = field_path_module.get_nested_value(field_path, self._data)
- return copy.deepcopy(nested_data)
-
- def to_dict(self):
- """Retrieve the data contained in this snapshot.
-
- A copy is returned since the data may contain mutable values,
- but the data stored in the snapshot must remain immutable.
-
- Returns:
- Dict[str, Any] or None:
- The data in the snapshot. Returns None if reference
- does not exist.
- """
- if not self._exists:
- return None
- return copy.deepcopy(self._data)
-
-
-def _get_document_path(client, path):
- """Convert a path tuple into a full path string.
-
- Of the form:
-
- ``projects/{project_id}/databases/{database_id}/...
- documents/{document_path}``
-
- Args:
- client (~.firestore_v1beta1.client.Client): The client that holds
- configuration details and a GAPIC client object.
- path (Tuple[str, ...]): The components in a document path.
-
- Returns:
- str: The fully-qualified document path.
- """
- parts = (client._database_string, "documents") + path
- return _helpers.DOCUMENT_PATH_DELIMITER.join(parts)
-
-
-def _consume_single_get(response_iterator):
- """Consume a gRPC stream that should contain a single response.
-
- The stream will correspond to a ``BatchGetDocuments`` request made
- for a single document.
-
- Args:
- response_iterator (~google.cloud.exceptions.GrpcRendezvous): A
- streaming iterator returned from a ``BatchGetDocuments``
- request.
-
- Returns:
- ~google.cloud.proto.firestore.v1beta1.\
- firestore_pb2.BatchGetDocumentsResponse: The single "get"
- response in the batch.
-
- Raises:
- ValueError: If anything other than exactly one response is returned.
- """
- # Calling ``list()`` consumes the entire iterator.
- all_responses = list(response_iterator)
- if len(all_responses) != 1:
- raise ValueError(
- "Unexpected response from `BatchGetDocumentsResponse`",
- all_responses,
- "Expected only one result",
- )
-
- return all_responses[0]
-
-
-def _first_write_result(write_results):
- """Get first write result from list.
-
- For cases where ``len(write_results) > 1``, this assumes the writes
- occurred at the same time (e.g. if an update and transform are sent
- at the same time).
-
- Args:
- write_results (List[google.cloud.proto.firestore.v1beta1.\
- write_pb2.WriteResult, ...]: The write results from a
- ``CommitResponse``.
-
- Returns:
- google.cloud.firestore_v1beta1.types.WriteResult: The
- lone write result from ``write_results``.
-
- Raises:
- ValueError: If there are zero write results. This is likely to
- **never** occur, since the backend should be stable.
- """
- if not write_results:
- raise ValueError("Expected at least one write result")
-
- return write_results[0]
-
-
-def _item_to_collection_ref(iterator, item):
- """Convert collection ID to collection ref.
-
- Args:
- iterator (google.api_core.page_iterator.GRPCIterator):
- iterator response
- item (str): ID of the collection
- """
- return iterator.document.collection(item)
diff --git a/google/cloud/firestore_v1beta1/field_path.py b/google/cloud/firestore_v1beta1/field_path.py
deleted file mode 100644
index 1570aefb57..0000000000
--- a/google/cloud/firestore_v1beta1/field_path.py
+++ /dev/null
@@ -1,386 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for managing / converting field paths to / from strings."""
-
-try:
- from collections import abc as collections_abc
-except ImportError: # Python 2.7
- import collections as collections_abc
-
-import re
-
-import six
-
-
-_FIELD_PATH_MISSING_TOP = "{!r} is not contained in the data"
-_FIELD_PATH_MISSING_KEY = "{!r} is not contained in the data for the key {!r}"
-_FIELD_PATH_WRONG_TYPE = (
- "The data at {!r} is not a dictionary, so it cannot contain the key {!r}"
-)
-
-_FIELD_PATH_DELIMITER = "."
-_BACKSLASH = "\\"
-_ESCAPED_BACKSLASH = _BACKSLASH * 2
-_BACKTICK = "`"
-_ESCAPED_BACKTICK = _BACKSLASH + _BACKTICK
-
-_SIMPLE_FIELD_NAME = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*$")
-_LEADING_ALPHA_INVALID = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*[^_a-zA-Z0-9]")
-PATH_ELEMENT_TOKENS = [
- ("SIMPLE", r"[_a-zA-Z][_a-zA-Z0-9]*"), # unquoted elements
- ("QUOTED", r"`(?:\\`|[^`])*?`"), # quoted elements, unquoted
- ("DOT", r"\."), # separator
-]
-TOKENS_PATTERN = "|".join("(?P<{}>{})".format(*pair) for pair in PATH_ELEMENT_TOKENS)
-TOKENS_REGEX = re.compile(TOKENS_PATTERN)
-
-
-def _tokenize_field_path(path):
- """Lex a field path into tokens (including dots).
-
- Args:
- path (str): field path to be lexed.
- Returns:
- List(str): tokens
- """
- pos = 0
- get_token = TOKENS_REGEX.match
- match = get_token(path)
- while match is not None:
- type_ = match.lastgroup
- value = match.group(type_)
- yield value
- pos = match.end()
- match = get_token(path, pos)
- if pos != len(path):
- raise ValueError("Path {} not consumed, residue: {}".format(path, path[pos:]))
-
-
-def split_field_path(path):
- """Split a field path into valid elements (without dots).
-
- Args:
- path (str): field path to be lexed.
- Returns:
- List(str): tokens
- Raises:
- ValueError: if the path does not match the elements-interspersed-
- with-dots pattern.
- """
- if not path:
- return []
-
- elements = []
- want_dot = False
-
- for element in _tokenize_field_path(path):
- if want_dot:
- if element != ".":
- raise ValueError("Invalid path: {}".format(path))
- else:
- want_dot = False
- else:
- if element == ".":
- raise ValueError("Invalid path: {}".format(path))
- elements.append(element)
- want_dot = True
-
- if not want_dot or not elements:
- raise ValueError("Invalid path: {}".format(path))
-
- return elements
-
-
-def parse_field_path(api_repr):
- """Parse a **field path** from into a list of nested field names.
-
- See :func:`field_path` for more on **field paths**.
-
- Args:
- api_repr (str):
- The unique Firestore api representation which consists of
- either simple or UTF-8 field names. It cannot exceed
- 1500 bytes, and cannot be empty. Simple field names match
- ``'^[_a-zA-Z][_a-zA-Z0-9]*$'``. All other field names are
- escaped by surrounding them with backticks.
-
- Returns:
- List[str, ...]: The list of field names in the field path.
- """
- # code dredged back up from
- # https://github.com/googleapis/google-cloud-python/pull/5109/files
- field_names = []
- for field_name in split_field_path(api_repr):
- # non-simple field name
- if field_name[0] == "`" and field_name[-1] == "`":
- field_name = field_name[1:-1]
- field_name = field_name.replace(_ESCAPED_BACKTICK, _BACKTICK)
- field_name = field_name.replace(_ESCAPED_BACKSLASH, _BACKSLASH)
- field_names.append(field_name)
- return field_names
-
-
-def render_field_path(field_names):
- """Create a **field path** from a list of nested field names.
-
- A **field path** is a ``.``-delimited concatenation of the field
- names. It is used to represent a nested field. For example,
- in the data
-
- .. code-block: python
-
- data = {
- 'aa': {
- 'bb': {
- 'cc': 10,
- },
- },
- }
-
- the field path ``'aa.bb.cc'`` represents that data stored in
- ``data['aa']['bb']['cc']``.
-
- Args:
- field_names (Iterable[str, ...]): The list of field names.
-
- Returns:
- str: The ``.``-delimited field path.
- """
- result = []
-
- for field_name in field_names:
- match = _SIMPLE_FIELD_NAME.match(field_name)
- if match and match.group(0) == field_name:
- result.append(field_name)
- else:
- replaced = field_name.replace(_BACKSLASH, _ESCAPED_BACKSLASH).replace(
- _BACKTICK, _ESCAPED_BACKTICK
- )
- result.append(_BACKTICK + replaced + _BACKTICK)
-
- return _FIELD_PATH_DELIMITER.join(result)
-
-
-get_field_path = render_field_path # backward-compatibility
-
-
-def get_nested_value(field_path, data):
- """Get a (potentially nested) value from a dictionary.
-
- If the data is nested, for example:
-
- .. code-block:: python
-
- >>> data
- {
- 'top1': {
- 'middle2': {
- 'bottom3': 20,
- 'bottom4': 22,
- },
- 'middle5': True,
- },
- 'top6': b'\x00\x01 foo',
- }
-
- a **field path** can be used to access the nested data. For
- example:
-
- .. code-block:: python
-
- >>> get_nested_value('top1', data)
- {
- 'middle2': {
- 'bottom3': 20,
- 'bottom4': 22,
- },
- 'middle5': True,
- }
- >>> get_nested_value('top1.middle2', data)
- {
- 'bottom3': 20,
- 'bottom4': 22,
- }
- >>> get_nested_value('top1.middle2.bottom3', data)
- 20
-
- See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path` for
- more information on **field paths**.
-
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names).
- data (Dict[str, Any]): The (possibly nested) data.
-
- Returns:
- Any: (A copy of) the value stored for the ``field_path``.
-
- Raises:
- KeyError: If the ``field_path`` does not match nested data.
- """
- field_names = parse_field_path(field_path)
-
- nested_data = data
- for index, field_name in enumerate(field_names):
- if isinstance(nested_data, collections_abc.Mapping):
- if field_name in nested_data:
- nested_data = nested_data[field_name]
- else:
- if index == 0:
- msg = _FIELD_PATH_MISSING_TOP.format(field_name)
- raise KeyError(msg)
- else:
- partial = render_field_path(field_names[:index])
- msg = _FIELD_PATH_MISSING_KEY.format(field_name, partial)
- raise KeyError(msg)
- else:
- partial = render_field_path(field_names[:index])
- msg = _FIELD_PATH_WRONG_TYPE.format(partial, field_name)
- raise KeyError(msg)
-
- return nested_data
-
-
-class FieldPath(object):
- """Field Path object for client use.
-
- A field path is a sequence of element keys, separated by periods.
- Each element key can be either a simple identifier, or a full unicode
- string.
-
- In the string representation of a field path, non-identifier elements
- must be quoted using backticks, with internal backticks and backslashes
- escaped with a backslash.
-
- Args:
- parts: (one or more strings)
- Indicating path of the key to be used.
- """
-
- def __init__(self, *parts):
- for part in parts:
- if not isinstance(part, six.string_types) or not part:
- error = "One or more components is not a string or is empty."
- raise ValueError(error)
- self.parts = tuple(parts)
-
- @classmethod
- def from_api_repr(cls, api_repr):
- """Factory: create a FieldPath from the string formatted per the API.
-
- Args:
- api_repr (str): a string path, with non-identifier elements quoted
- It cannot exceed 1500 characters, and cannot be empty.
- Returns:
- (:class:`FieldPath`) An instance parsed from ``api_repr``.
- Raises:
- ValueError if the parsing fails
- """
- api_repr = api_repr.strip()
- if not api_repr:
- raise ValueError("Field path API representation cannot be empty.")
- return cls(*parse_field_path(api_repr))
-
- @classmethod
- def from_string(cls, path_string):
- """Factory: create a FieldPath from a unicode string representation.
-
- This method splits on the character `.` and disallows the
- characters `~*/[]`. To create a FieldPath whose components have
- those characters, call the constructor.
-
- Args:
- path_string (str): A unicode string which cannot contain
- `~*/[]` characters, cannot exceed 1500 bytes, and cannot be empty.
-
- Returns:
- (:class:`FieldPath`) An instance parsed from ``path_string``.
- """
- try:
- return cls.from_api_repr(path_string)
- except ValueError:
- elements = path_string.split(".")
- for element in elements:
- if not element:
- raise ValueError("Empty element")
- if _LEADING_ALPHA_INVALID.match(element):
- raise ValueError(
- "Non-alphanum char in element with leading alpha: {}".format(
- element
- )
- )
- return FieldPath(*elements)
-
- def __repr__(self):
- paths = ""
- for part in self.parts:
- paths += "'" + part + "',"
- paths = paths[:-1]
- return "FieldPath({})".format(paths)
-
- def __hash__(self):
- return hash(self.to_api_repr())
-
- def __eq__(self, other):
- if isinstance(other, FieldPath):
- return self.parts == other.parts
- return NotImplemented
-
- def __lt__(self, other):
- if isinstance(other, FieldPath):
- return self.parts < other.parts
- return NotImplemented
-
- def __add__(self, other):
- """Adds `other` field path to end of this field path.
-
- Args:
- other (~google.cloud.firestore_v1beta1._helpers.FieldPath, str):
- The field path to add to the end of this `FieldPath`.
- """
- if isinstance(other, FieldPath):
- parts = self.parts + other.parts
- return FieldPath(*parts)
- elif isinstance(other, six.string_types):
- parts = self.parts + FieldPath.from_string(other).parts
- return FieldPath(*parts)
- else:
- return NotImplemented
-
- def to_api_repr(self):
- """Render a quoted string representation of the FieldPath
-
- Returns:
- (str) Quoted string representation of the path stored
- within this FieldPath.
- """
- return render_field_path(self.parts)
-
- def eq_or_parent(self, other):
- """Check whether ``other`` is an ancestor.
-
- Returns:
- (bool) True IFF ``other`` is an ancestor or equal to ``self``,
- else False.
- """
- return self.parts[: len(other.parts)] == other.parts[: len(self.parts)]
-
- def lineage(self):
- """Return field paths for all parents.
-
- Returns: Set[:class:`FieldPath`]
- """
- indexes = six.moves.range(1, len(self.parts))
- return {FieldPath(*self.parts[:index]) for index in indexes}
diff --git a/google/cloud/firestore_v1beta1/gapic/__init__.py b/google/cloud/firestore_v1beta1/gapic/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/google/cloud/firestore_v1beta1/gapic/enums.py b/google/cloud/firestore_v1beta1/gapic/enums.py
deleted file mode 100644
index ee7a9ec6f5..0000000000
--- a/google/cloud/firestore_v1beta1/gapic/enums.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Wrappers for protocol buffer enum types."""
-
-import enum
-
-
-class NullValue(enum.IntEnum):
- """
- ``NullValue`` is a singleton enumeration to represent the null value for
- the ``Value`` type union.
-
- The JSON representation for ``NullValue`` is JSON ``null``.
-
- Attributes:
- NULL_VALUE (int): Null value.
- """
-
- NULL_VALUE = 0
-
-
-class DocumentTransform(object):
- class FieldTransform(object):
- class ServerValue(enum.IntEnum):
- """
- A value that is calculated by the server.
-
- Attributes:
- SERVER_VALUE_UNSPECIFIED (int): Unspecified. This value must not be used.
- REQUEST_TIME (int): The time at which the server processed the request, with millisecond
- precision.
- """
-
- SERVER_VALUE_UNSPECIFIED = 0
- REQUEST_TIME = 1
-
-
-class StructuredQuery(object):
- class Direction(enum.IntEnum):
- """
- A sort direction.
-
- Attributes:
- DIRECTION_UNSPECIFIED (int): Unspecified.
- ASCENDING (int): Ascending.
- DESCENDING (int): Descending.
- """
-
- DIRECTION_UNSPECIFIED = 0
- ASCENDING = 1
- DESCENDING = 2
-
- class CompositeFilter(object):
- class Operator(enum.IntEnum):
- """
- A composite filter operator.
-
- Attributes:
- OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
- AND (int): The results are required to satisfy each of the combined filters.
- """
-
- OPERATOR_UNSPECIFIED = 0
- AND = 1
-
- class FieldFilter(object):
- class Operator(enum.IntEnum):
- """
- A field filter operator.
-
- Attributes:
- OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
- LESS_THAN (int): Less than. Requires that the field come first in ``order_by``.
- LESS_THAN_OR_EQUAL (int): Less than or equal. Requires that the field come first in ``order_by``.
- GREATER_THAN (int): Greater than. Requires that the field come first in ``order_by``.
- GREATER_THAN_OR_EQUAL (int): Greater than or equal. Requires that the field come first in
- ``order_by``.
- EQUAL (int): Equal.
- ARRAY_CONTAINS (int): Contains. Requires that the field is an array.
- IN (int): In. Requires that ``value`` is a non-empty ArrayValue with at most 10
- values.
- ARRAY_CONTAINS_ANY (int): Contains any. Requires that the field is an array and ``value`` is a
- non-empty ArrayValue with at most 10 values.
- """
-
- OPERATOR_UNSPECIFIED = 0
- LESS_THAN = 1
- LESS_THAN_OR_EQUAL = 2
- GREATER_THAN = 3
- GREATER_THAN_OR_EQUAL = 4
- EQUAL = 5
- ARRAY_CONTAINS = 7
- IN = 8
- ARRAY_CONTAINS_ANY = 9
-
- class UnaryFilter(object):
- class Operator(enum.IntEnum):
- """
- A unary operator.
-
- Attributes:
- OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
- IS_NAN (int): Test if a field is equal to NaN.
- IS_NULL (int): Test if an expression evaluates to Null.
- """
-
- OPERATOR_UNSPECIFIED = 0
- IS_NAN = 2
- IS_NULL = 3
-
-
-class TargetChange(object):
- class TargetChangeType(enum.IntEnum):
- """
- The type of change.
-
- Attributes:
- NO_CHANGE (int): No change has occurred. Used only to send an updated ``resume_token``.
- ADD (int): The targets have been added.
- REMOVE (int): The targets have been removed.
- CURRENT (int): The targets reflect all changes committed before the targets were added
- to the stream.
-
- This will be sent after or with a ``read_time`` that is greater than or
- equal to the time at which the targets were added.
-
- Listeners can wait for this change if read-after-write semantics are
- desired.
- RESET (int): The targets have been reset, and a new initial state for the targets
- will be returned in subsequent changes.
-
- After the initial state is complete, ``CURRENT`` will be returned even
- if the target was previously indicated to be ``CURRENT``.
- """
-
- NO_CHANGE = 0
- ADD = 1
- REMOVE = 2
- CURRENT = 3
- RESET = 4
diff --git a/google/cloud/firestore_v1beta1/gapic/firestore_client.py b/google/cloud/firestore_v1beta1/gapic/firestore_client.py
deleted file mode 100644
index 659094164e..0000000000
--- a/google/cloud/firestore_v1beta1/gapic/firestore_client.py
+++ /dev/null
@@ -1,1461 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Accesses the google.firestore.v1beta1 Firestore API."""
-
-import functools
-import pkg_resources
-import warnings
-
-from google.oauth2 import service_account
-import google.api_core.client_options
-import google.api_core.gapic_v1.client_info
-import google.api_core.gapic_v1.config
-import google.api_core.gapic_v1.method
-import google.api_core.gapic_v1.routing_header
-import google.api_core.grpc_helpers
-import google.api_core.page_iterator
-import google.api_core.path_template
-import google.api_core.protobuf_helpers
-import grpc
-
-from google.cloud.firestore_v1beta1.gapic import enums
-from google.cloud.firestore_v1beta1.gapic import firestore_client_config
-from google.cloud.firestore_v1beta1.gapic.transports import firestore_grpc_transport
-from google.cloud.firestore_v1beta1.proto import common_pb2
-from google.cloud.firestore_v1beta1.proto import document_pb2
-from google.cloud.firestore_v1beta1.proto import firestore_pb2
-from google.cloud.firestore_v1beta1.proto import firestore_pb2_grpc
-from google.cloud.firestore_v1beta1.proto import query_pb2
-from google.cloud.firestore_v1beta1.proto import write_pb2
-from google.protobuf import empty_pb2
-from google.protobuf import timestamp_pb2
-
-
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
- "google-cloud-firestore"
-).version
-
-
-class FirestoreClient(object):
- """
- The Cloud Firestore service.
-
- This service exposes several types of comparable timestamps:
-
- - ``create_time`` - The time at which a document was created. Changes
- only when a document is deleted, then re-created. Increases in a
- strict monotonic fashion.
- - ``update_time`` - The time at which a document was last updated.
- Changes every time a document is modified. Does not change when a
- write results in no modifications. Increases in a strict monotonic
- fashion.
- - ``read_time`` - The time at which a particular state was observed.
- Used to denote a consistent snapshot of the database or the time at
- which a Document was observed to not exist.
- - ``commit_time`` - The time at which the writes in a transaction were
- committed. Any read with an equal or greater ``read_time`` is
- guaranteed to see the effects of the transaction.
- """
-
- SERVICE_ADDRESS = "firestore.googleapis.com:443"
- """The default address of the service."""
-
- # The name of the interface for this client. This is the key used to
- # find the method configuration in the client_config dictionary.
- _INTERFACE_NAME = "google.firestore.v1beta1.Firestore"
-
- @classmethod
- def from_service_account_file(cls, filename, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- FirestoreClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- @classmethod
- def any_path_path(cls, project, database, document, any_path):
- """Return a fully-qualified any_path string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}/documents/{document}/{any_path=**}",
- project=project,
- database=database,
- document=document,
- any_path=any_path,
- )
-
- @classmethod
- def database_root_path(cls, project, database):
- """Return a fully-qualified database_root string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}",
- project=project,
- database=database,
- )
-
- @classmethod
- def document_path_path(cls, project, database, document_path):
- """Return a fully-qualified document_path string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}/documents/{document_path=**}",
- project=project,
- database=database,
- document_path=document_path,
- )
-
- @classmethod
- def document_root_path(cls, project, database):
- """Return a fully-qualified document_root string."""
- return google.api_core.path_template.expand(
- "projects/{project}/databases/{database}/documents",
- project=project,
- database=database,
- )
-
- def __init__(
- self,
- transport=None,
- channel=None,
- credentials=None,
- client_config=None,
- client_info=None,
- client_options=None,
- ):
- """Constructor.
-
- Args:
- transport (Union[~.FirestoreGrpcTransport,
- Callable[[~.Credentials, type], ~.FirestoreGrpcTransport]): A transport
- instance, responsible for actually making the API calls.
- The default transport uses the gRPC protocol.
- This argument may also be a callable which returns a
- transport instance. Callables will be sent the credentials
- as the first argument and the default transport class as
- the second argument.
- channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
- through which to make calls. This argument is mutually exclusive
- with ``credentials``; providing both will raise an exception.
- credentials (google.auth.credentials.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is mutually exclusive with providing a
- transport instance to ``transport``; doing so will raise
- an exception.
- client_config (dict): DEPRECATED. A dictionary of call options for
- each method. If not specified, the default configuration is used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- client_options (Union[dict, google.api_core.client_options.ClientOptions]):
- Client options used to set user options on the client. API Endpoint
- should be set through client_options.
- """
- # Raise deprecation warnings for things we want to go away.
- if client_config is not None:
- warnings.warn(
- "The `client_config` argument is deprecated.",
- PendingDeprecationWarning,
- stacklevel=2,
- )
- else:
- client_config = firestore_client_config.config
-
- if channel:
- warnings.warn(
- "The `channel` argument is deprecated; use " "`transport` instead.",
- PendingDeprecationWarning,
- stacklevel=2,
- )
-
- api_endpoint = self.SERVICE_ADDRESS
- if client_options:
- if type(client_options) == dict:
- client_options = google.api_core.client_options.from_dict(
- client_options
- )
- if client_options.api_endpoint:
- api_endpoint = client_options.api_endpoint
-
- # Instantiate the transport.
- # The transport is responsible for handling serialization and
- # deserialization and actually sending data to the service.
- if transport:
- if callable(transport):
- self.transport = transport(
- credentials=credentials,
- default_class=firestore_grpc_transport.FirestoreGrpcTransport,
- address=api_endpoint,
- )
- else:
- if credentials:
- raise ValueError(
- "Received both a transport instance and "
- "credentials; these are mutually exclusive."
- )
- self.transport = transport
- else:
- self.transport = firestore_grpc_transport.FirestoreGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
- )
-
- if client_info is None:
- client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
- )
- else:
- client_info.gapic_version = _GAPIC_LIBRARY_VERSION
- self._client_info = client_info
-
- # Parse out the default settings for retry and timeout for each RPC
- # from the client configuration.
- # (Ordinarily, these are the defaults specified in the `*_config.py`
- # file next to this one.)
- self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
- )
-
- # Save a dictionary of cached API call functions.
- # These are the actual callables which invoke the proper
- # transport methods, wrapped with `wrap_method` to add retry,
- # timeout, and the like.
- self._inner_api_calls = {}
-
- # Service calls
- def get_document(
- self,
- name,
- mask=None,
- transaction=None,
- read_time=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Gets a single document.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> response = client.get_document(name)
-
- Args:
- name (str): Required. The resource name of the Document to get. In the format:
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to return. If not set, returns all fields.
-
- If the document has a field that is not present in this mask, that field
- will not be returned in the response.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
- transaction (bytes): Reads the document in a transaction.
- read_time (Union[dict, ~google.cloud.firestore_v1beta1.types.Timestamp]): Reads the version of the document at the given time.
- This may not be older than 60 seconds.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.Timestamp`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_v1beta1.types.Document` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "get_document" not in self._inner_api_calls:
- self._inner_api_calls[
- "get_document"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.get_document,
- default_retry=self._method_configs["GetDocument"].retry,
- default_timeout=self._method_configs["GetDocument"].timeout,
- client_info=self._client_info,
- )
-
- # Sanity check: We have some fields which are mutually exclusive;
- # raise ValueError if more than one is sent.
- google.api_core.protobuf_helpers.check_oneof(
- transaction=transaction, read_time=read_time
- )
-
- request = firestore_pb2.GetDocumentRequest(
- name=name, mask=mask, transaction=transaction, read_time=read_time
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["get_document"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def list_documents(
- self,
- parent,
- collection_id,
- page_size=None,
- order_by=None,
- mask=None,
- transaction=None,
- read_time=None,
- show_missing=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Lists documents.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> # TODO: Initialize `collection_id`:
- >>> collection_id = ''
- >>>
- >>> # Iterate over all results
- >>> for element in client.list_documents(parent, collection_id):
- ... # process element
- ... pass
- >>>
- >>>
- >>> # Alternatively:
- >>>
- >>> # Iterate over results one page at a time
- >>> for page in client.list_documents(parent, collection_id).pages:
- ... for element in page:
- ... # process element
- ... pass
-
- Args:
- parent (str): Required. The parent resource name. In the format:
- ``projects/{project_id}/databases/{database_id}/documents`` or
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- For example: ``projects/my-project/databases/my-database/documents`` or
- ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
- collection_id (str): Required. The collection ID, relative to ``parent``, to list. For
- example: ``chatrooms`` or ``messages``.
- page_size (int): The maximum number of resources contained in the
- underlying API response. If page streaming is performed per-
- resource, this parameter does not affect the return value. If page
- streaming is performed per-page, this determines the maximum number
- of resources in a page.
- order_by (str): The order to sort results by. For example: ``priority desc, name``.
- mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to return. If not set, returns all fields.
-
- If a document has a field that is not present in this mask, that field
- will not be returned in the response.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
- transaction (bytes): Reads documents in a transaction.
- read_time (Union[dict, ~google.cloud.firestore_v1beta1.types.Timestamp]): Reads documents as they were at the given time.
- This may not be older than 60 seconds.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.Timestamp`
- show_missing (bool): If the list should show missing documents. A missing document is a
- document that does not exist but has sub-documents. These documents will
- be returned with a key but will not have fields,
- ``Document.create_time``, or ``Document.update_time`` set.
-
- Requests with ``show_missing`` may not specify ``where`` or
- ``order_by``.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.api_core.page_iterator.PageIterator` instance.
- An iterable of :class:`~google.cloud.firestore_v1beta1.types.Document` instances.
- You can also iterate over the pages of the response
- using its `pages` property.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "list_documents" not in self._inner_api_calls:
- self._inner_api_calls[
- "list_documents"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.list_documents,
- default_retry=self._method_configs["ListDocuments"].retry,
- default_timeout=self._method_configs["ListDocuments"].timeout,
- client_info=self._client_info,
- )
-
- # Sanity check: We have some fields which are mutually exclusive;
- # raise ValueError if more than one is sent.
- google.api_core.protobuf_helpers.check_oneof(
- transaction=transaction, read_time=read_time
- )
-
- request = firestore_pb2.ListDocumentsRequest(
- parent=parent,
- collection_id=collection_id,
- page_size=page_size,
- order_by=order_by,
- mask=mask,
- transaction=transaction,
- read_time=read_time,
- show_missing=show_missing,
- )
- iterator = google.api_core.page_iterator.GRPCIterator(
- client=None,
- method=functools.partial(
- self._inner_api_calls["list_documents"],
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- ),
- request=request,
- items_field="documents",
- request_token_field="page_token",
- response_token_field="next_page_token",
- )
- return iterator
-
- def create_document(
- self,
- parent,
- collection_id,
- document_id,
- document,
- mask=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Creates a new document.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> # TODO: Initialize `collection_id`:
- >>> collection_id = ''
- >>>
- >>> # TODO: Initialize `document_id`:
- >>> document_id = ''
- >>>
- >>> # TODO: Initialize `document`:
- >>> document = {}
- >>>
- >>> response = client.create_document(parent, collection_id, document_id, document)
-
- Args:
- parent (str): Required. The parent resource. For example:
- ``projects/{project_id}/databases/{database_id}/documents`` or
- ``projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}``
- collection_id (str): Required. The collection ID, relative to ``parent``, to list. For
- example: ``chatrooms``.
- document_id (str): The client-assigned document ID to use for this document.
-
- Optional. If not specified, an ID will be assigned by the service.
- document (Union[dict, ~google.cloud.firestore_v1beta1.types.Document]): Required. The document to create. ``name`` must not be set.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.Document`
- mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to return. If not set, returns all fields.
-
- If the document has a field that is not present in this mask, that field
- will not be returned in the response.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_v1beta1.types.Document` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "create_document" not in self._inner_api_calls:
- self._inner_api_calls[
- "create_document"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.create_document,
- default_retry=self._method_configs["CreateDocument"].retry,
- default_timeout=self._method_configs["CreateDocument"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.CreateDocumentRequest(
- parent=parent,
- collection_id=collection_id,
- document_id=document_id,
- document=document,
- mask=mask,
- )
- return self._inner_api_calls["create_document"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def update_document(
- self,
- document,
- update_mask,
- mask=None,
- current_document=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Updates or inserts a document.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> # TODO: Initialize `document`:
- >>> document = {}
- >>>
- >>> # TODO: Initialize `update_mask`:
- >>> update_mask = {}
- >>>
- >>> response = client.update_document(document, update_mask)
-
- Args:
- document (Union[dict, ~google.cloud.firestore_v1beta1.types.Document]): Required. The updated document.
- Creates the document if it does not already exist.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.Document`
- update_mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to update.
- None of the field paths in the mask may contain a reserved name.
-
- If the document exists on the server and has fields not referenced in the
- mask, they are left unchanged.
- Fields referenced in the mask, but not present in the input document, are
- deleted from the document on the server.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
- mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to return. If not set, returns all fields.
-
- If the document has a field that is not present in this mask, that field
- will not be returned in the response.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
- current_document (Union[dict, ~google.cloud.firestore_v1beta1.types.Precondition]): An optional precondition on the document.
- The request will fail if this is set and not met by the target document.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.Precondition`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_v1beta1.types.Document` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "update_document" not in self._inner_api_calls:
- self._inner_api_calls[
- "update_document"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.update_document,
- default_retry=self._method_configs["UpdateDocument"].retry,
- default_timeout=self._method_configs["UpdateDocument"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.UpdateDocumentRequest(
- document=document,
- update_mask=update_mask,
- mask=mask,
- current_document=current_document,
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("document.name", document.name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["update_document"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def delete_document(
- self,
- name,
- current_document=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Deletes a document.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> client.delete_document(name)
-
- Args:
- name (str): Required. The resource name of the Document to delete. In the format:
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- current_document (Union[dict, ~google.cloud.firestore_v1beta1.types.Precondition]): An optional precondition on the document.
- The request will fail if this is set and not met by the target document.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.Precondition`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "delete_document" not in self._inner_api_calls:
- self._inner_api_calls[
- "delete_document"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.delete_document,
- default_retry=self._method_configs["DeleteDocument"].retry,
- default_timeout=self._method_configs["DeleteDocument"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.DeleteDocumentRequest(
- name=name, current_document=current_document
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- self._inner_api_calls["delete_document"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def batch_get_documents(
- self,
- database,
- documents,
- mask=None,
- transaction=None,
- new_transaction=None,
- read_time=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Gets multiple documents.
-
- Documents returned by this method are not guaranteed to be returned in the
- same order that they were requested.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>>
- >>> # TODO: Initialize `documents`:
- >>> documents = []
- >>>
- >>> for element in client.batch_get_documents(database, documents):
- ... # process element
- ... pass
-
- Args:
- database (str): Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- documents (list[str]): The names of the documents to retrieve. In the format:
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- The request will fail if any of the document is not a child resource of
- the given ``database``. Duplicate names will be elided.
- mask (Union[dict, ~google.cloud.firestore_v1beta1.types.DocumentMask]): The fields to return. If not set, returns all fields.
-
- If a document has a field that is not present in this mask, that field will
- not be returned in the response.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.DocumentMask`
- transaction (bytes): Reads documents in a transaction.
- new_transaction (Union[dict, ~google.cloud.firestore_v1beta1.types.TransactionOptions]): Starts a new transaction and reads the documents.
- Defaults to a read-only transaction.
- The new transaction ID will be returned as the first response in the
- stream.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.TransactionOptions`
- read_time (Union[dict, ~google.cloud.firestore_v1beta1.types.Timestamp]): Reads documents as they were at the given time.
- This may not be older than 60 seconds.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.Timestamp`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- Iterable[~google.cloud.firestore_v1beta1.types.BatchGetDocumentsResponse].
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "batch_get_documents" not in self._inner_api_calls:
- self._inner_api_calls[
- "batch_get_documents"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.batch_get_documents,
- default_retry=self._method_configs["BatchGetDocuments"].retry,
- default_timeout=self._method_configs["BatchGetDocuments"].timeout,
- client_info=self._client_info,
- )
-
- # Sanity check: We have some fields which are mutually exclusive;
- # raise ValueError if more than one is sent.
- google.api_core.protobuf_helpers.check_oneof(
- transaction=transaction,
- new_transaction=new_transaction,
- read_time=read_time,
- )
-
- request = firestore_pb2.BatchGetDocumentsRequest(
- database=database,
- documents=documents,
- mask=mask,
- transaction=transaction,
- new_transaction=new_transaction,
- read_time=read_time,
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("database", database)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["batch_get_documents"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def begin_transaction(
- self,
- database,
- options_=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Starts a new transaction.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>>
- >>> response = client.begin_transaction(database)
-
- Args:
- database (str): Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- options_ (Union[dict, ~google.cloud.firestore_v1beta1.types.TransactionOptions]): The options for the transaction.
- Defaults to a read-write transaction.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.TransactionOptions`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_v1beta1.types.BeginTransactionResponse` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "begin_transaction" not in self._inner_api_calls:
- self._inner_api_calls[
- "begin_transaction"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.begin_transaction,
- default_retry=self._method_configs["BeginTransaction"].retry,
- default_timeout=self._method_configs["BeginTransaction"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.BeginTransactionRequest(
- database=database, options=options_
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("database", database)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["begin_transaction"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def commit(
- self,
- database,
- writes,
- transaction=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Commits a transaction, while optionally updating documents.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>>
- >>> # TODO: Initialize `writes`:
- >>> writes = []
- >>>
- >>> response = client.commit(database, writes)
-
- Args:
- database (str): Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- writes (list[Union[dict, ~google.cloud.firestore_v1beta1.types.Write]]): The writes to apply.
-
- Always executed atomically and in order.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.Write`
- transaction (bytes): If set, applies all writes in this transaction, and commits it.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.firestore_v1beta1.types.CommitResponse` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "commit" not in self._inner_api_calls:
- self._inner_api_calls[
- "commit"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.commit,
- default_retry=self._method_configs["Commit"].retry,
- default_timeout=self._method_configs["Commit"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.CommitRequest(
- database=database, writes=writes, transaction=transaction
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("database", database)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["commit"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def rollback(
- self,
- database,
- transaction,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Rolls back a transaction.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>>
- >>> # TODO: Initialize `transaction`:
- >>> transaction = b''
- >>>
- >>> client.rollback(database, transaction)
-
- Args:
- database (str): Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- transaction (bytes): Required. The transaction to roll back.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "rollback" not in self._inner_api_calls:
- self._inner_api_calls[
- "rollback"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.rollback,
- default_retry=self._method_configs["Rollback"].retry,
- default_timeout=self._method_configs["Rollback"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.RollbackRequest(
- database=database, transaction=transaction
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("database", database)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- self._inner_api_calls["rollback"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def run_query(
- self,
- parent,
- structured_query=None,
- transaction=None,
- new_transaction=None,
- read_time=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Runs a query.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> for element in client.run_query(parent):
- ... # process element
- ... pass
-
- Args:
- parent (str): Required. The parent resource name. In the format:
- ``projects/{project_id}/databases/{database_id}/documents`` or
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- For example: ``projects/my-project/databases/my-database/documents`` or
- ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
- structured_query (Union[dict, ~google.cloud.firestore_v1beta1.types.StructuredQuery]): A structured query.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.StructuredQuery`
- transaction (bytes): Reads documents in a transaction.
- new_transaction (Union[dict, ~google.cloud.firestore_v1beta1.types.TransactionOptions]): Starts a new transaction and reads the documents.
- Defaults to a read-only transaction.
- The new transaction ID will be returned as the first response in the
- stream.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.TransactionOptions`
- read_time (Union[dict, ~google.cloud.firestore_v1beta1.types.Timestamp]): Reads documents as they were at the given time.
- This may not be older than 60 seconds.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.firestore_v1beta1.types.Timestamp`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- Iterable[~google.cloud.firestore_v1beta1.types.RunQueryResponse].
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "run_query" not in self._inner_api_calls:
- self._inner_api_calls[
- "run_query"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.run_query,
- default_retry=self._method_configs["RunQuery"].retry,
- default_timeout=self._method_configs["RunQuery"].timeout,
- client_info=self._client_info,
- )
-
- # Sanity check: We have some fields which are mutually exclusive;
- # raise ValueError if more than one is sent.
- google.api_core.protobuf_helpers.check_oneof(structured_query=structured_query)
-
- # Sanity check: We have some fields which are mutually exclusive;
- # raise ValueError if more than one is sent.
- google.api_core.protobuf_helpers.check_oneof(
- transaction=transaction,
- new_transaction=new_transaction,
- read_time=read_time,
- )
-
- request = firestore_pb2.RunQueryRequest(
- parent=parent,
- structured_query=structured_query,
- transaction=transaction,
- new_transaction=new_transaction,
- read_time=read_time,
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["run_query"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def write(
- self,
- requests,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Streams batches of document updates and deletes, in order.
-
- EXPERIMENTAL: This method interface might change in the future.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>> request = {'database': database}
- >>>
- >>> requests = [request]
- >>> for element in client.write(requests):
- ... # process element
- ... pass
-
- Args:
- requests (iterator[dict|google.cloud.firestore_v1beta1.proto.firestore_pb2.WriteRequest]): The input objects. If a dict is provided, it must be of the
- same form as the protobuf message :class:`~google.cloud.firestore_v1beta1.types.WriteRequest`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- Iterable[~google.cloud.firestore_v1beta1.types.WriteResponse].
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "write" not in self._inner_api_calls:
- self._inner_api_calls[
- "write"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.write,
- default_retry=self._method_configs["Write"].retry,
- default_timeout=self._method_configs["Write"].timeout,
- client_info=self._client_info,
- )
-
- return self._inner_api_calls["write"](
- requests, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def listen(
- self,
- requests,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Listens to changes.
-
- EXPERIMENTAL: This method interface might change in the future.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> database = client.database_root_path('[PROJECT]', '[DATABASE]')
- >>> request = {'database': database}
- >>>
- >>> requests = [request]
- >>> for element in client.listen(requests):
- ... # process element
- ... pass
-
- Args:
- requests (iterator[dict|google.cloud.firestore_v1beta1.proto.firestore_pb2.ListenRequest]): The input objects. If a dict is provided, it must be of the
- same form as the protobuf message :class:`~google.cloud.firestore_v1beta1.types.ListenRequest`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- Iterable[~google.cloud.firestore_v1beta1.types.ListenResponse].
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "listen" not in self._inner_api_calls:
- self._inner_api_calls[
- "listen"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.listen,
- default_retry=self._method_configs["Listen"].retry,
- default_timeout=self._method_configs["Listen"].timeout,
- client_info=self._client_info,
- )
-
- return self._inner_api_calls["listen"](
- requests, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def list_collection_ids(
- self,
- parent,
- page_size=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Lists all the collection IDs underneath a document.
-
- Example:
- >>> from google.cloud import firestore_v1beta1
- >>>
- >>> client = firestore_v1beta1.FirestoreClient()
- >>>
- >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]')
- >>>
- >>> # Iterate over all results
- >>> for element in client.list_collection_ids(parent):
- ... # process element
- ... pass
- >>>
- >>>
- >>> # Alternatively:
- >>>
- >>> # Iterate over results one page at a time
- >>> for page in client.list_collection_ids(parent).pages:
- ... for element in page:
- ... # process element
- ... pass
-
- Args:
- parent (str): Required. The parent document. In the format:
- ``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- For example:
- ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
- page_size (int): The maximum number of resources contained in the
- underlying API response. If page streaming is performed per-
- resource, this parameter does not affect the return value. If page
- streaming is performed per-page, this determines the maximum number
- of resources in a page.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.api_core.page_iterator.PageIterator` instance.
- An iterable of :class:`str` instances.
- You can also iterate over the pages of the response
- using its `pages` property.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "list_collection_ids" not in self._inner_api_calls:
- self._inner_api_calls[
- "list_collection_ids"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.list_collection_ids,
- default_retry=self._method_configs["ListCollectionIds"].retry,
- default_timeout=self._method_configs["ListCollectionIds"].timeout,
- client_info=self._client_info,
- )
-
- request = firestore_pb2.ListCollectionIdsRequest(
- parent=parent, page_size=page_size
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- iterator = google.api_core.page_iterator.GRPCIterator(
- client=None,
- method=functools.partial(
- self._inner_api_calls["list_collection_ids"],
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- ),
- request=request,
- items_field="collection_ids",
- request_token_field="page_token",
- response_token_field="next_page_token",
- )
- return iterator
diff --git a/google/cloud/firestore_v1beta1/gapic/firestore_client_config.py b/google/cloud/firestore_v1beta1/gapic/firestore_client_config.py
deleted file mode 100644
index dd458fe976..0000000000
--- a/google/cloud/firestore_v1beta1/gapic/firestore_client_config.py
+++ /dev/null
@@ -1,97 +0,0 @@
-config = {
- "interfaces": {
- "google.firestore.v1beta1.Firestore": {
- "retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
- "non_idempotent": [],
- },
- "retry_params": {
- "default": {
- "initial_retry_delay_millis": 100,
- "retry_delay_multiplier": 1.3,
- "max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 20000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 20000,
- "total_timeout_millis": 600000,
- },
- "streaming": {
- "initial_retry_delay_millis": 100,
- "retry_delay_multiplier": 1.3,
- "max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 300000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 300000,
- "total_timeout_millis": 600000,
- },
- },
- "methods": {
- "GetDocument": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "ListDocuments": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "CreateDocument": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "UpdateDocument": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "DeleteDocument": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "BatchGetDocuments": {
- "timeout_millis": 300000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "streaming",
- },
- "BeginTransaction": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "Commit": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "Rollback": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "RunQuery": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "streaming",
- },
- "Write": {
- "timeout_millis": 86400000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "streaming",
- },
- "Listen": {
- "timeout_millis": 86400000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "streaming",
- },
- "ListCollectionIds": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- },
- }
- }
-}
diff --git a/google/cloud/firestore_v1beta1/gapic/transports/__init__.py b/google/cloud/firestore_v1beta1/gapic/transports/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py b/google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py
deleted file mode 100644
index 9f26080c82..0000000000
--- a/google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py
+++ /dev/null
@@ -1,281 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import google.api_core.grpc_helpers
-
-from google.cloud.firestore_v1beta1.proto import firestore_pb2_grpc
-
-
-class FirestoreGrpcTransport(object):
- """gRPC transport class providing stubs for
- google.firestore.v1beta1 Firestore API.
-
- The transport provides access to the raw gRPC stubs,
- which can be used to take advantage of advanced
- features of gRPC.
- """
-
- # The scopes needed to make gRPC calls to all of the methods defined
- # in this service.
- _OAUTH_SCOPES = (
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/datastore",
- )
-
- def __init__(
- self, channel=None, credentials=None, address="firestore.googleapis.com:443"
- ):
- """Instantiate the transport class.
-
- Args:
- channel (grpc.Channel): A ``Channel`` instance through
- which to make calls. This argument is mutually exclusive
- with ``credentials``; providing both will raise an exception.
- credentials (google.auth.credentials.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- address (str): The address where the service is hosted.
- """
- # If both `channel` and `credentials` are specified, raise an
- # exception (channels come with credentials baked in already).
- if channel is not None and credentials is not None:
- raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
- )
-
- # Create the channel.
- if channel is None:
- channel = self.create_channel(
- address=address,
- credentials=credentials,
- options={
- "grpc.max_send_message_length": -1,
- "grpc.max_receive_message_length": -1,
- }.items(),
- )
-
- self._channel = channel
-
- # gRPC uses objects called "stubs" that are bound to the
- # channel and provide a basic method for each RPC.
- self._stubs = {"firestore_stub": firestore_pb2_grpc.FirestoreStub(channel)}
-
- @classmethod
- def create_channel(
- cls, address="firestore.googleapis.com:443", credentials=None, **kwargs
- ):
- """Create and return a gRPC channel object.
-
- Args:
- address (str): The host for the channel to use.
- credentials (~.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- kwargs (dict): Keyword arguments, which are passed to the
- channel creation.
-
- Returns:
- grpc.Channel: A gRPC channel object.
- """
- return google.api_core.grpc_helpers.create_channel(
- address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
- )
-
- @property
- def channel(self):
- """The gRPC channel used by the transport.
-
- Returns:
- grpc.Channel: A gRPC channel object.
- """
- return self._channel
-
- @property
- def get_document(self):
- """Return the gRPC stub for :meth:`FirestoreClient.get_document`.
-
- Gets a single document.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].GetDocument
-
- @property
- def list_documents(self):
- """Return the gRPC stub for :meth:`FirestoreClient.list_documents`.
-
- Lists documents.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].ListDocuments
-
- @property
- def create_document(self):
- """Return the gRPC stub for :meth:`FirestoreClient.create_document`.
-
- Creates a new document.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].CreateDocument
-
- @property
- def update_document(self):
- """Return the gRPC stub for :meth:`FirestoreClient.update_document`.
-
- Updates or inserts a document.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].UpdateDocument
-
- @property
- def delete_document(self):
- """Return the gRPC stub for :meth:`FirestoreClient.delete_document`.
-
- Deletes a document.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].DeleteDocument
-
- @property
- def batch_get_documents(self):
- """Return the gRPC stub for :meth:`FirestoreClient.batch_get_documents`.
-
- Gets multiple documents.
-
- Documents returned by this method are not guaranteed to be returned in the
- same order that they were requested.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].BatchGetDocuments
-
- @property
- def begin_transaction(self):
- """Return the gRPC stub for :meth:`FirestoreClient.begin_transaction`.
-
- Starts a new transaction.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].BeginTransaction
-
- @property
- def commit(self):
- """Return the gRPC stub for :meth:`FirestoreClient.commit`.
-
- Commits a transaction, while optionally updating documents.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].Commit
-
- @property
- def rollback(self):
- """Return the gRPC stub for :meth:`FirestoreClient.rollback`.
-
- Rolls back a transaction.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].Rollback
-
- @property
- def run_query(self):
- """Return the gRPC stub for :meth:`FirestoreClient.run_query`.
-
- Runs a query.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].RunQuery
-
- @property
- def write(self):
- """Return the gRPC stub for :meth:`FirestoreClient.write`.
-
- Streams batches of document updates and deletes, in order.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].Write
-
- @property
- def listen(self):
- """Return the gRPC stub for :meth:`FirestoreClient.listen`.
-
- Listens to changes.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].Listen
-
- @property
- def list_collection_ids(self):
- """Return the gRPC stub for :meth:`FirestoreClient.list_collection_ids`.
-
- Lists all the collection IDs underneath a document.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["firestore_stub"].ListCollectionIds
diff --git a/google/cloud/firestore_v1beta1/order.py b/google/cloud/firestore_v1beta1/order.py
deleted file mode 100644
index 79207f530c..0000000000
--- a/google/cloud/firestore_v1beta1/order.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-from google.cloud.firestore_v1beta1._helpers import decode_value
-import math
-
-
-class TypeOrder(Enum):
- # NOTE: This order is defined by the backend and cannot be changed.
- NULL = 0
- BOOLEAN = 1
- NUMBER = 2
- TIMESTAMP = 3
- STRING = 4
- BLOB = 5
- REF = 6
- GEO_POINT = 7
- ARRAY = 8
- OBJECT = 9
-
- @staticmethod
- def from_value(value):
- v = value.WhichOneof("value_type")
-
- lut = {
- "null_value": TypeOrder.NULL,
- "boolean_value": TypeOrder.BOOLEAN,
- "integer_value": TypeOrder.NUMBER,
- "double_value": TypeOrder.NUMBER,
- "timestamp_value": TypeOrder.TIMESTAMP,
- "string_value": TypeOrder.STRING,
- "bytes_value": TypeOrder.BLOB,
- "reference_value": TypeOrder.REF,
- "geo_point_value": TypeOrder.GEO_POINT,
- "array_value": TypeOrder.ARRAY,
- "map_value": TypeOrder.OBJECT,
- }
-
- if v not in lut:
- raise ValueError("Could not detect value type for " + v)
- return lut[v]
-
-
-class Order(object):
- """
- Order implements the ordering semantics of the backend.
- """
-
- @classmethod
- def compare(cls, left, right):
- """
- Main comparison function for all Firestore types.
- @return -1 is left < right, 0 if left == right, otherwise 1
- """
- # First compare the types.
- leftType = TypeOrder.from_value(left).value
- rightType = TypeOrder.from_value(right).value
-
- if leftType != rightType:
- if leftType < rightType:
- return -1
- return 1
-
- value_type = left.WhichOneof("value_type")
-
- if value_type == "null_value":
- return 0 # nulls are all equal
- elif value_type == "boolean_value":
- return cls._compare_to(left.boolean_value, right.boolean_value)
- elif value_type == "integer_value":
- return cls.compare_numbers(left, right)
- elif value_type == "double_value":
- return cls.compare_numbers(left, right)
- elif value_type == "timestamp_value":
- return cls.compare_timestamps(left, right)
- elif value_type == "string_value":
- return cls._compare_to(left.string_value, right.string_value)
- elif value_type == "bytes_value":
- return cls.compare_blobs(left, right)
- elif value_type == "reference_value":
- return cls.compare_resource_paths(left, right)
- elif value_type == "geo_point_value":
- return cls.compare_geo_points(left, right)
- elif value_type == "array_value":
- return cls.compare_arrays(left, right)
- elif value_type == "map_value":
- return cls.compare_objects(left, right)
- else:
- raise ValueError("Unknown ``value_type``", str(value_type))
-
- @staticmethod
- def compare_blobs(left, right):
- left_bytes = left.bytes_value
- right_bytes = right.bytes_value
-
- return Order._compare_to(left_bytes, right_bytes)
-
- @staticmethod
- def compare_timestamps(left, right):
- left = left.timestamp_value
- right = right.timestamp_value
-
- seconds = Order._compare_to(left.seconds or 0, right.seconds or 0)
- if seconds != 0:
- return seconds
-
- return Order._compare_to(left.nanos or 0, right.nanos or 0)
-
- @staticmethod
- def compare_geo_points(left, right):
- left_value = decode_value(left, None)
- right_value = decode_value(right, None)
- cmp = (left_value.latitude > right_value.latitude) - (
- left_value.latitude < right_value.latitude
- )
-
- if cmp != 0:
- return cmp
- return (left_value.longitude > right_value.longitude) - (
- left_value.longitude < right_value.longitude
- )
-
- @staticmethod
- def compare_resource_paths(left, right):
- left = left.reference_value
- right = right.reference_value
-
- left_segments = left.split("/")
- right_segments = right.split("/")
- shorter = min(len(left_segments), len(right_segments))
- # compare segments
- for i in range(shorter):
- if left_segments[i] < right_segments[i]:
- return -1
- if left_segments[i] > right_segments[i]:
- return 1
-
- left_length = len(left)
- right_length = len(right)
- return (left_length > right_length) - (left_length < right_length)
-
- @staticmethod
- def compare_arrays(left, right):
- l_values = left.array_value.values
- r_values = right.array_value.values
-
- length = min(len(l_values), len(r_values))
- for i in range(length):
- cmp = Order.compare(l_values[i], r_values[i])
- if cmp != 0:
- return cmp
-
- return Order._compare_to(len(l_values), len(r_values))
-
- @staticmethod
- def compare_objects(left, right):
- left_fields = left.map_value.fields
- right_fields = right.map_value.fields
-
- for left_key, right_key in zip(sorted(left_fields), sorted(right_fields)):
- keyCompare = Order._compare_to(left_key, right_key)
- if keyCompare != 0:
- return keyCompare
-
- value_compare = Order.compare(
- left_fields[left_key], right_fields[right_key]
- )
- if value_compare != 0:
- return value_compare
-
- return Order._compare_to(len(left_fields), len(right_fields))
-
- @staticmethod
- def compare_numbers(left, right):
- left_value = decode_value(left, None)
- right_value = decode_value(right, None)
- return Order.compare_doubles(left_value, right_value)
-
- @staticmethod
- def compare_doubles(left, right):
- if math.isnan(left):
- if math.isnan(right):
- return 0
- return -1
- if math.isnan(right):
- return 1
-
- return Order._compare_to(left, right)
-
- @staticmethod
- def _compare_to(left, right):
- # We can't just use cmp(left, right) because cmp doesn't exist
- # in Python 3, so this is an equivalent suggested by
- # https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons
- return (left > right) - (left < right)
diff --git a/google/cloud/firestore_v1beta1/proto/__init__.py b/google/cloud/firestore_v1beta1/proto/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/google/cloud/firestore_v1beta1/proto/admin/__init__.py b/google/cloud/firestore_v1beta1/proto/admin/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py b/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py
deleted file mode 100644
index 9bb7f6553b..0000000000
--- a/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py
+++ /dev/null
@@ -1,1343 +0,0 @@
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-from google.cloud.firestore_v1beta1.proto.admin import (
- index_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2,
-)
-from google.longrunning import (
- operations_pb2 as google_dot_longrunning_dot_operations__pb2,
-)
-from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto",
- package="google.firestore.admin.v1beta1",
- syntax="proto3",
- serialized_pb=_b(
- '\n@google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto\x12\x1egoogle.firestore.admin.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x36google/cloud/firestore_v1beta1/proto/admin/index.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x80\x03\n\x16IndexOperationMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05index\x18\x03 \x01(\t\x12\\\n\x0eoperation_type\x18\x04 \x01(\x0e\x32\x44.google.firestore.admin.v1beta1.IndexOperationMetadata.OperationType\x12\x11\n\tcancelled\x18\x05 \x01(\x08\x12\x43\n\x11\x64ocument_progress\x18\x06 \x01(\x0b\x32(.google.firestore.admin.v1beta1.Progress"C\n\rOperationType\x12\x1e\n\x1aOPERATION_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x43REATING_INDEX\x10\x01":\n\x08Progress\x12\x16\n\x0ework_completed\x18\x01 \x01(\x03\x12\x16\n\x0ework_estimated\x18\x02 \x01(\x03"Z\n\x12\x43reateIndexRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x34\n\x05index\x18\x02 \x01(\x0b\x32%.google.firestore.admin.v1beta1.Index"\x1f\n\x0fGetIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"[\n\x12ListIndexesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t""\n\x12\x44\x65leteIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"f\n\x13ListIndexesResponse\x12\x36\n\x07indexes\x18\x01 \x03(\x0b\x32%.google.firestore.admin.v1beta1.Index\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\x9c\x05\n\x0e\x46irestoreAdmin\x12\xa1\x01\n\x0b\x43reateIndex\x12\x32.google.firestore.admin.v1beta1.CreateIndexRequest\x1a\x1d.google.longrunning.Operation"?\x82\xd3\xe4\x93\x02\x39"0/v1beta1/{parent=projects/*/databases/*}/indexes:\x05index\x12\xb0\x01\n\x0bListIndexes\x12\x32.google.firestore.admin.v1beta1.ListIndexesRequest\x1a\x33.google.firestore.admin.v1beta1.ListIndexesResponse"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1beta1/{parent=projects/*/databases/*}/indexes\x12\x9c\x01\n\x08GetIndex\x12/.google.firestore.admin.v1beta1.GetIndexRequest\x1a%.google.firestore.admin.v1beta1.Index"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1beta1/{name=projects/*/databases/*/indexes/*}\x12\x93\x01\n\x0b\x44\x65leteIndex\x12\x32.google.firestore.admin.v1beta1.DeleteIndexRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\x32*0/v1beta1/{name=projects/*/databases/*/indexes/*}B\xae\x01\n"com.google.firestore.admin.v1beta1B\x13\x46irestoreAdminProtoP\x01ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\xa2\x02\x04GCFS\xaa\x02$Google.Cloud.Firestore.Admin.V1Beta1b\x06proto3'
- ),
- dependencies=[
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.DESCRIPTOR,
- google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- ],
-)
-
-
-_INDEXOPERATIONMETADATA_OPERATIONTYPE = _descriptor.EnumDescriptor(
- name="OperationType",
- full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.OperationType",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="OPERATION_TYPE_UNSPECIFIED",
- index=0,
- number=0,
- options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="CREATING_INDEX", index=1, number=1, options=None, type=None
- ),
- ],
- containing_type=None,
- options=None,
- serialized_start=603,
- serialized_end=670,
-)
-_sym_db.RegisterEnumDescriptor(_INDEXOPERATIONMETADATA_OPERATIONTYPE)
-
-
-_INDEXOPERATIONMETADATA = _descriptor.Descriptor(
- name="IndexOperationMetadata",
- full_name="google.firestore.admin.v1beta1.IndexOperationMetadata",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="start_time",
- full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.start_time",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="end_time",
- full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.end_time",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="index",
- full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.index",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="operation_type",
- full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.operation_type",
- index=3,
- number=4,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="cancelled",
- full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.cancelled",
- index=4,
- number=5,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="document_progress",
- full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.document_progress",
- index=5,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_INDEXOPERATIONMETADATA_OPERATIONTYPE],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=286,
- serialized_end=670,
-)
-
-
-_PROGRESS = _descriptor.Descriptor(
- name="Progress",
- full_name="google.firestore.admin.v1beta1.Progress",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="work_completed",
- full_name="google.firestore.admin.v1beta1.Progress.work_completed",
- index=0,
- number=1,
- type=3,
- cpp_type=2,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="work_estimated",
- full_name="google.firestore.admin.v1beta1.Progress.work_estimated",
- index=1,
- number=2,
- type=3,
- cpp_type=2,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=672,
- serialized_end=730,
-)
-
-
-_CREATEINDEXREQUEST = _descriptor.Descriptor(
- name="CreateIndexRequest",
- full_name="google.firestore.admin.v1beta1.CreateIndexRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="parent",
- full_name="google.firestore.admin.v1beta1.CreateIndexRequest.parent",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="index",
- full_name="google.firestore.admin.v1beta1.CreateIndexRequest.index",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=732,
- serialized_end=822,
-)
-
-
-_GETINDEXREQUEST = _descriptor.Descriptor(
- name="GetIndexRequest",
- full_name="google.firestore.admin.v1beta1.GetIndexRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.admin.v1beta1.GetIndexRequest.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=824,
- serialized_end=855,
-)
-
-
-_LISTINDEXESREQUEST = _descriptor.Descriptor(
- name="ListIndexesRequest",
- full_name="google.firestore.admin.v1beta1.ListIndexesRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="parent",
- full_name="google.firestore.admin.v1beta1.ListIndexesRequest.parent",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="filter",
- full_name="google.firestore.admin.v1beta1.ListIndexesRequest.filter",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="page_size",
- full_name="google.firestore.admin.v1beta1.ListIndexesRequest.page_size",
- index=2,
- number=3,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="page_token",
- full_name="google.firestore.admin.v1beta1.ListIndexesRequest.page_token",
- index=3,
- number=4,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=857,
- serialized_end=948,
-)
-
-
-_DELETEINDEXREQUEST = _descriptor.Descriptor(
- name="DeleteIndexRequest",
- full_name="google.firestore.admin.v1beta1.DeleteIndexRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.admin.v1beta1.DeleteIndexRequest.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=950,
- serialized_end=984,
-)
-
-
-_LISTINDEXESRESPONSE = _descriptor.Descriptor(
- name="ListIndexesResponse",
- full_name="google.firestore.admin.v1beta1.ListIndexesResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="indexes",
- full_name="google.firestore.admin.v1beta1.ListIndexesResponse.indexes",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="next_page_token",
- full_name="google.firestore.admin.v1beta1.ListIndexesResponse.next_page_token",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=986,
- serialized_end=1088,
-)
-
-_INDEXOPERATIONMETADATA.fields_by_name[
- "start_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_INDEXOPERATIONMETADATA.fields_by_name[
- "end_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_INDEXOPERATIONMETADATA.fields_by_name[
- "operation_type"
-].enum_type = _INDEXOPERATIONMETADATA_OPERATIONTYPE
-_INDEXOPERATIONMETADATA.fields_by_name["document_progress"].message_type = _PROGRESS
-_INDEXOPERATIONMETADATA_OPERATIONTYPE.containing_type = _INDEXOPERATIONMETADATA
-_CREATEINDEXREQUEST.fields_by_name[
- "index"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX
-)
-_LISTINDEXESRESPONSE.fields_by_name[
- "indexes"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX
-)
-DESCRIPTOR.message_types_by_name["IndexOperationMetadata"] = _INDEXOPERATIONMETADATA
-DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS
-DESCRIPTOR.message_types_by_name["CreateIndexRequest"] = _CREATEINDEXREQUEST
-DESCRIPTOR.message_types_by_name["GetIndexRequest"] = _GETINDEXREQUEST
-DESCRIPTOR.message_types_by_name["ListIndexesRequest"] = _LISTINDEXESREQUEST
-DESCRIPTOR.message_types_by_name["DeleteIndexRequest"] = _DELETEINDEXREQUEST
-DESCRIPTOR.message_types_by_name["ListIndexesResponse"] = _LISTINDEXESRESPONSE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-IndexOperationMetadata = _reflection.GeneratedProtocolMessageType(
- "IndexOperationMetadata",
- (_message.Message,),
- dict(
- DESCRIPTOR=_INDEXOPERATIONMETADATA,
- __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
- __doc__="""Metadata for index operations. This metadata populates the metadata
- field of [google.longrunning.Operation][google.longrunning.Operation].
-
-
- Attributes:
- start_time:
- The time that work began on the operation.
- end_time:
- The time the operation ended, either successfully or
- otherwise. Unset if the operation is still active.
- index:
- The index resource that this operation is acting on. For
- example: ``projects/{project_id}/databases/{database_id}/index
- es/{index_id}``
- operation_type:
- The type of index operation.
- cancelled:
- True if the [google.longrunning.Operation] was cancelled. If
- the cancellation is in progress, cancelled will be true but [g
- oogle.longrunning.Operation.done][google.longrunning.Operation
- .done] will be false.
- document_progress:
- Progress of the existing operation, measured in number of
- documents.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.IndexOperationMetadata)
- ),
-)
-_sym_db.RegisterMessage(IndexOperationMetadata)
-
-Progress = _reflection.GeneratedProtocolMessageType(
- "Progress",
- (_message.Message,),
- dict(
- DESCRIPTOR=_PROGRESS,
- __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
- __doc__="""Measures the progress of a particular metric.
-
-
- Attributes:
- work_completed:
- An estimate of how much work has been completed. Note that
- this may be greater than ``work_estimated``.
- work_estimated:
- An estimate of how much work needs to be performed. Zero if
- the work estimate is unavailable. May change as work
- progresses.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.Progress)
- ),
-)
-_sym_db.RegisterMessage(Progress)
-
-CreateIndexRequest = _reflection.GeneratedProtocolMessageType(
- "CreateIndexRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CREATEINDEXREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.CreateIndex][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
-
-
- Attributes:
- parent:
- The name of the database this index will apply to. For
- example: ``projects/{project_id}/databases/{database_id}``
- index:
- The index to create. The name and state should not be
- specified. Certain single field indexes cannot be created or
- deleted.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.CreateIndexRequest)
- ),
-)
-_sym_db.RegisterMessage(CreateIndexRequest)
-
-GetIndexRequest = _reflection.GeneratedProtocolMessageType(
- "GetIndexRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GETINDEXREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.GetIndex][google.firestore.admin.v1beta1.FirestoreAdmin.GetIndex].
-
-
- Attributes:
- name:
- The name of the index. For example: ``projects/{project_id}/da
- tabases/{database_id}/indexes/{index_id}``
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.GetIndexRequest)
- ),
-)
-_sym_db.RegisterMessage(GetIndexRequest)
-
-ListIndexesRequest = _reflection.GeneratedProtocolMessageType(
- "ListIndexesRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTINDEXESREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes].
-
-
- Attributes:
- parent:
- The database name. For example:
- ``projects/{project_id}/databases/{database_id}``
- page_size:
- The standard List page size.
- page_token:
- The standard List page token.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.ListIndexesRequest)
- ),
-)
-_sym_db.RegisterMessage(ListIndexesRequest)
-
-DeleteIndexRequest = _reflection.GeneratedProtocolMessageType(
- "DeleteIndexRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DELETEINDEXREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
- __doc__="""The request for
- [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex].
-
-
- Attributes:
- name:
- The index name. For example: ``projects/{project_id}/databases
- /{database_id}/indexes/{index_id}``
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.DeleteIndexRequest)
- ),
-)
-_sym_db.RegisterMessage(DeleteIndexRequest)
-
-ListIndexesResponse = _reflection.GeneratedProtocolMessageType(
- "ListIndexesResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTINDEXESRESPONSE,
- __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2",
- __doc__="""The response for
- [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes].
-
-
- Attributes:
- indexes:
- The indexes.
- next_page_token:
- The standard List next-page token.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.ListIndexesResponse)
- ),
-)
-_sym_db.RegisterMessage(ListIndexesResponse)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(
- descriptor_pb2.FileOptions(),
- _b(
- '\n"com.google.firestore.admin.v1beta1B\023FirestoreAdminProtoP\001ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\242\002\004GCFS\252\002$Google.Cloud.Firestore.Admin.V1Beta1'
- ),
-)
-
-_FIRESTOREADMIN = _descriptor.ServiceDescriptor(
- name="FirestoreAdmin",
- full_name="google.firestore.admin.v1beta1.FirestoreAdmin",
- file=DESCRIPTOR,
- index=0,
- options=None,
- serialized_start=1091,
- serialized_end=1759,
- methods=[
- _descriptor.MethodDescriptor(
- name="CreateIndex",
- full_name="google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex",
- index=0,
- containing_service=None,
- input_type=_CREATEINDEXREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- options=_descriptor._ParseOptions(
- descriptor_pb2.MethodOptions(),
- _b(
- '\202\323\344\223\0029"0/v1beta1/{parent=projects/*/databases/*}/indexes:\005index'
- ),
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ListIndexes",
- full_name="google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes",
- index=1,
- containing_service=None,
- input_type=_LISTINDEXESREQUEST,
- output_type=_LISTINDEXESRESPONSE,
- options=_descriptor._ParseOptions(
- descriptor_pb2.MethodOptions(),
- _b(
- "\202\323\344\223\0022\0220/v1beta1/{parent=projects/*/databases/*}/indexes"
- ),
- ),
- ),
- _descriptor.MethodDescriptor(
- name="GetIndex",
- full_name="google.firestore.admin.v1beta1.FirestoreAdmin.GetIndex",
- index=2,
- containing_service=None,
- input_type=_GETINDEXREQUEST,
- output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX,
- options=_descriptor._ParseOptions(
- descriptor_pb2.MethodOptions(),
- _b(
- "\202\323\344\223\0022\0220/v1beta1/{name=projects/*/databases/*/indexes/*}"
- ),
- ),
- ),
- _descriptor.MethodDescriptor(
- name="DeleteIndex",
- full_name="google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex",
- index=3,
- containing_service=None,
- input_type=_DELETEINDEXREQUEST,
- output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- options=_descriptor._ParseOptions(
- descriptor_pb2.MethodOptions(),
- _b(
- "\202\323\344\223\0022*0/v1beta1/{name=projects/*/databases/*/indexes/*}"
- ),
- ),
- ),
- ],
-)
-_sym_db.RegisterServiceDescriptor(_FIRESTOREADMIN)
-
-DESCRIPTOR.services_by_name["FirestoreAdmin"] = _FIRESTOREADMIN
-
-try:
- # THESE ELEMENTS WILL BE DEPRECATED.
- # Please use the generated *_pb2_grpc.py files instead.
- import grpc
- from grpc.beta import implementations as beta_implementations
- from grpc.beta import interfaces as beta_interfaces
- from grpc.framework.common import cardinality
- from grpc.framework.interfaces.face import utilities as face_utilities
-
- class FirestoreAdminStub(object):
- """The Cloud Firestore Admin API.
-
- This API provides several administrative services for Cloud Firestore.
-
- # Concepts
-
- Project, Database, Namespace, Collection, and Document are used as defined in
- the Google Cloud Firestore API.
-
- Operation: An Operation represents work being performed in the background.
-
-
- # Services
-
- ## Index
-
- The index service manages Cloud Firestore indexes.
-
- Index creation is performed asynchronously.
- An Operation resource is created for each such asynchronous operation.
- The state of the operation (including any errors encountered)
- may be queried via the Operation resource.
-
- ## Metadata
-
- Provides metadata and statistical information about data in Cloud Firestore.
- The data provided as part of this API may be stale.
-
- ## Operation
-
- The Operations collection provides a record of actions performed for the
- specified Project (including any Operations in progress). Operations are not
- created directly but through calls on other collections or resources.
-
- An Operation that is not yet done may be cancelled. The request to cancel is
- asynchronous and the Operation may continue to run for some time after the
- request to cancel is made.
-
- An Operation that is done may be deleted so that it is no longer listed as
- part of the Operation collection.
-
- Operations are created by service `FirestoreAdmin`, but are accessed via
- service `google.longrunning.Operations`.
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.CreateIndex = channel.unary_unary(
- "/google.firestore.admin.v1beta1.FirestoreAdmin/CreateIndex",
- request_serializer=CreateIndexRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.ListIndexes = channel.unary_unary(
- "/google.firestore.admin.v1beta1.FirestoreAdmin/ListIndexes",
- request_serializer=ListIndexesRequest.SerializeToString,
- response_deserializer=ListIndexesResponse.FromString,
- )
- self.GetIndex = channel.unary_unary(
- "/google.firestore.admin.v1beta1.FirestoreAdmin/GetIndex",
- request_serializer=GetIndexRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString,
- )
- self.DeleteIndex = channel.unary_unary(
- "/google.firestore.admin.v1beta1.FirestoreAdmin/DeleteIndex",
- request_serializer=DeleteIndexRequest.SerializeToString,
- response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
- )
-
- class FirestoreAdminServicer(object):
- """The Cloud Firestore Admin API.
-
- This API provides several administrative services for Cloud Firestore.
-
- # Concepts
-
- Project, Database, Namespace, Collection, and Document are used as defined in
- the Google Cloud Firestore API.
-
- Operation: An Operation represents work being performed in the background.
-
-
- # Services
-
- ## Index
-
- The index service manages Cloud Firestore indexes.
-
- Index creation is performed asynchronously.
- An Operation resource is created for each such asynchronous operation.
- The state of the operation (including any errors encountered)
- may be queried via the Operation resource.
-
- ## Metadata
-
- Provides metadata and statistical information about data in Cloud Firestore.
- The data provided as part of this API may be stale.
-
- ## Operation
-
- The Operations collection provides a record of actions performed for the
- specified Project (including any Operations in progress). Operations are not
- created directly but through calls on other collections or resources.
-
- An Operation that is not yet done may be cancelled. The request to cancel is
- asynchronous and the Operation may continue to run for some time after the
- request to cancel is made.
-
- An Operation that is done may be deleted so that it is no longer listed as
- part of the Operation collection.
-
- Operations are created by service `FirestoreAdmin`, but are accessed via
- service `google.longrunning.Operations`.
- """
-
- def CreateIndex(self, request, context):
- """Creates the specified index.
- A newly created index's initial state is `CREATING`. On completion of the
- returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`.
- If the index already exists, the call will return an `ALREADY_EXISTS`
- status.
-
- During creation, the process could result in an error, in which case the
- index will move to the `ERROR` state. The process can be recovered by
- fixing the data that caused the error, removing the index with
- [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with
- [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
-
- Indexes with a single field cannot be created.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ListIndexes(self, request, context):
- """Lists the indexes that match the specified filters.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def GetIndex(self, request, context):
- """Gets an index.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def DeleteIndex(self, request, context):
- """Deletes an index.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def add_FirestoreAdminServicer_to_server(servicer, server):
- rpc_method_handlers = {
- "CreateIndex": grpc.unary_unary_rpc_method_handler(
- servicer.CreateIndex,
- request_deserializer=CreateIndexRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "ListIndexes": grpc.unary_unary_rpc_method_handler(
- servicer.ListIndexes,
- request_deserializer=ListIndexesRequest.FromString,
- response_serializer=ListIndexesResponse.SerializeToString,
- ),
- "GetIndex": grpc.unary_unary_rpc_method_handler(
- servicer.GetIndex,
- request_deserializer=GetIndexRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString,
- ),
- "DeleteIndex": grpc.unary_unary_rpc_method_handler(
- servicer.DeleteIndex,
- request_deserializer=DeleteIndexRequest.FromString,
- response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- "google.firestore.admin.v1beta1.FirestoreAdmin", rpc_method_handlers
- )
- server.add_generic_rpc_handlers((generic_handler,))
-
- class BetaFirestoreAdminServicer(object):
- """The Beta API is deprecated for 0.15.0 and later.
-
- It is recommended to use the GA API (classes and functions in this
- file not marked beta) for all further purposes. This class was generated
- only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
-
- """The Cloud Firestore Admin API.
-
- This API provides several administrative services for Cloud Firestore.
-
- # Concepts
-
- Project, Database, Namespace, Collection, and Document are used as defined in
- the Google Cloud Firestore API.
-
- Operation: An Operation represents work being performed in the background.
-
-
- # Services
-
- ## Index
-
- The index service manages Cloud Firestore indexes.
-
- Index creation is performed asynchronously.
- An Operation resource is created for each such asynchronous operation.
- The state of the operation (including any errors encountered)
- may be queried via the Operation resource.
-
- ## Metadata
-
- Provides metadata and statistical information about data in Cloud Firestore.
- The data provided as part of this API may be stale.
-
- ## Operation
-
- The Operations collection provides a record of actions performed for the
- specified Project (including any Operations in progress). Operations are not
- created directly but through calls on other collections or resources.
-
- An Operation that is not yet done may be cancelled. The request to cancel is
- asynchronous and the Operation may continue to run for some time after the
- request to cancel is made.
-
- An Operation that is done may be deleted so that it is no longer listed as
- part of the Operation collection.
-
- Operations are created by service `FirestoreAdmin`, but are accessed via
- service `google.longrunning.Operations`.
- """
-
- def CreateIndex(self, request, context):
- """Creates the specified index.
- A newly created index's initial state is `CREATING`. On completion of the
- returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`.
- If the index already exists, the call will return an `ALREADY_EXISTS`
- status.
-
- During creation, the process could result in an error, in which case the
- index will move to the `ERROR` state. The process can be recovered by
- fixing the data that caused the error, removing the index with
- [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with
- [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
-
- Indexes with a single field cannot be created.
- """
- context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
-
- def ListIndexes(self, request, context):
- """Lists the indexes that match the specified filters.
- """
- context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
-
- def GetIndex(self, request, context):
- """Gets an index.
- """
- context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
-
- def DeleteIndex(self, request, context):
- """Deletes an index.
- """
- context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
-
- class BetaFirestoreAdminStub(object):
- """The Beta API is deprecated for 0.15.0 and later.
-
- It is recommended to use the GA API (classes and functions in this
- file not marked beta) for all further purposes. This class was generated
- only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
-
- """The Cloud Firestore Admin API.
-
- This API provides several administrative services for Cloud Firestore.
-
- # Concepts
-
- Project, Database, Namespace, Collection, and Document are used as defined in
- the Google Cloud Firestore API.
-
- Operation: An Operation represents work being performed in the background.
-
-
- # Services
-
- ## Index
-
- The index service manages Cloud Firestore indexes.
-
- Index creation is performed asynchronously.
- An Operation resource is created for each such asynchronous operation.
- The state of the operation (including any errors encountered)
- may be queried via the Operation resource.
-
- ## Metadata
-
- Provides metadata and statistical information about data in Cloud Firestore.
- The data provided as part of this API may be stale.
-
- ## Operation
-
- The Operations collection provides a record of actions performed for the
- specified Project (including any Operations in progress). Operations are not
- created directly but through calls on other collections or resources.
-
- An Operation that is not yet done may be cancelled. The request to cancel is
- asynchronous and the Operation may continue to run for some time after the
- request to cancel is made.
-
- An Operation that is done may be deleted so that it is no longer listed as
- part of the Operation collection.
-
- Operations are created by service `FirestoreAdmin`, but are accessed via
- service `google.longrunning.Operations`.
- """
-
- def CreateIndex(
- self,
- request,
- timeout,
- metadata=None,
- with_call=False,
- protocol_options=None,
- ):
- """Creates the specified index.
- A newly created index's initial state is `CREATING`. On completion of the
- returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`.
- If the index already exists, the call will return an `ALREADY_EXISTS`
- status.
-
- During creation, the process could result in an error, in which case the
- index will move to the `ERROR` state. The process can be recovered by
- fixing the data that caused the error, removing the index with
- [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with
- [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
-
- Indexes with a single field cannot be created.
- """
- raise NotImplementedError()
-
- CreateIndex.future = None
-
- def ListIndexes(
- self,
- request,
- timeout,
- metadata=None,
- with_call=False,
- protocol_options=None,
- ):
- """Lists the indexes that match the specified filters.
- """
- raise NotImplementedError()
-
- ListIndexes.future = None
-
- def GetIndex(
- self,
- request,
- timeout,
- metadata=None,
- with_call=False,
- protocol_options=None,
- ):
- """Gets an index.
- """
- raise NotImplementedError()
-
- GetIndex.future = None
-
- def DeleteIndex(
- self,
- request,
- timeout,
- metadata=None,
- with_call=False,
- protocol_options=None,
- ):
- """Deletes an index.
- """
- raise NotImplementedError()
-
- DeleteIndex.future = None
-
- def beta_create_FirestoreAdmin_server(
- servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None
- ):
- """The Beta API is deprecated for 0.15.0 and later.
-
- It is recommended to use the GA API (classes and functions in this
- file not marked beta) for all further purposes. This function was
- generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
- request_deserializers = {
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "CreateIndex",
- ): CreateIndexRequest.FromString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "DeleteIndex",
- ): DeleteIndexRequest.FromString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "GetIndex",
- ): GetIndexRequest.FromString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "ListIndexes",
- ): ListIndexesRequest.FromString,
- }
- response_serializers = {
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "CreateIndex",
- ): google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "DeleteIndex",
- ): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "GetIndex",
- ): google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "ListIndexes",
- ): ListIndexesResponse.SerializeToString,
- }
- method_implementations = {
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "CreateIndex",
- ): face_utilities.unary_unary_inline(servicer.CreateIndex),
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "DeleteIndex",
- ): face_utilities.unary_unary_inline(servicer.DeleteIndex),
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "GetIndex",
- ): face_utilities.unary_unary_inline(servicer.GetIndex),
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "ListIndexes",
- ): face_utilities.unary_unary_inline(servicer.ListIndexes),
- }
- server_options = beta_implementations.server_options(
- request_deserializers=request_deserializers,
- response_serializers=response_serializers,
- thread_pool=pool,
- thread_pool_size=pool_size,
- default_timeout=default_timeout,
- maximum_timeout=maximum_timeout,
- )
- return beta_implementations.server(
- method_implementations, options=server_options
- )
-
- def beta_create_FirestoreAdmin_stub(
- channel, host=None, metadata_transformer=None, pool=None, pool_size=None
- ):
- """The Beta API is deprecated for 0.15.0 and later.
-
- It is recommended to use the GA API (classes and functions in this
- file not marked beta) for all further purposes. This function was
- generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
- request_serializers = {
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "CreateIndex",
- ): CreateIndexRequest.SerializeToString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "DeleteIndex",
- ): DeleteIndexRequest.SerializeToString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "GetIndex",
- ): GetIndexRequest.SerializeToString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "ListIndexes",
- ): ListIndexesRequest.SerializeToString,
- }
- response_deserializers = {
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "CreateIndex",
- ): google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "DeleteIndex",
- ): google_dot_protobuf_dot_empty__pb2.Empty.FromString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "GetIndex",
- ): google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString,
- (
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- "ListIndexes",
- ): ListIndexesResponse.FromString,
- }
- cardinalities = {
- "CreateIndex": cardinality.Cardinality.UNARY_UNARY,
- "DeleteIndex": cardinality.Cardinality.UNARY_UNARY,
- "GetIndex": cardinality.Cardinality.UNARY_UNARY,
- "ListIndexes": cardinality.Cardinality.UNARY_UNARY,
- }
- stub_options = beta_implementations.stub_options(
- host=host,
- metadata_transformer=metadata_transformer,
- request_serializers=request_serializers,
- response_deserializers=response_deserializers,
- thread_pool=pool,
- thread_pool_size=pool_size,
- )
- return beta_implementations.dynamic_stub(
- channel,
- "google.firestore.admin.v1beta1.FirestoreAdmin",
- cardinalities,
- options=stub_options,
- )
-
-
-except ImportError:
- pass
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py b/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py
deleted file mode 100644
index 81eaad7ad1..0000000000
--- a/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py
+++ /dev/null
@@ -1,203 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
-from google.cloud.firestore_v1beta1.proto.admin import (
- firestore_admin_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2,
-)
-from google.cloud.firestore_v1beta1.proto.admin import (
- index_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2,
-)
-from google.longrunning import (
- operations_pb2 as google_dot_longrunning_dot_operations__pb2,
-)
-from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
-
-
-class FirestoreAdminStub(object):
- """The Cloud Firestore Admin API.
-
- This API provides several administrative services for Cloud Firestore.
-
- # Concepts
-
- Project, Database, Namespace, Collection, and Document are used as defined in
- the Google Cloud Firestore API.
-
- Operation: An Operation represents work being performed in the background.
-
-
- # Services
-
- ## Index
-
- The index service manages Cloud Firestore indexes.
-
- Index creation is performed asynchronously.
- An Operation resource is created for each such asynchronous operation.
- The state of the operation (including any errors encountered)
- may be queried via the Operation resource.
-
- ## Metadata
-
- Provides metadata and statistical information about data in Cloud Firestore.
- The data provided as part of this API may be stale.
-
- ## Operation
-
- The Operations collection provides a record of actions performed for the
- specified Project (including any Operations in progress). Operations are not
- created directly but through calls on other collections or resources.
-
- An Operation that is not yet done may be cancelled. The request to cancel is
- asynchronous and the Operation may continue to run for some time after the
- request to cancel is made.
-
- An Operation that is done may be deleted so that it is no longer listed as
- part of the Operation collection.
-
- Operations are created by service `FirestoreAdmin`, but are accessed via
- service `google.longrunning.Operations`.
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.CreateIndex = channel.unary_unary(
- "/google.firestore.admin.v1beta1.FirestoreAdmin/CreateIndex",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.CreateIndexRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.ListIndexes = channel.unary_unary(
- "/google.firestore.admin.v1beta1.FirestoreAdmin/ListIndexes",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesResponse.FromString,
- )
- self.GetIndex = channel.unary_unary(
- "/google.firestore.admin.v1beta1.FirestoreAdmin/GetIndex",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.GetIndexRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString,
- )
- self.DeleteIndex = channel.unary_unary(
- "/google.firestore.admin.v1beta1.FirestoreAdmin/DeleteIndex",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.DeleteIndexRequest.SerializeToString,
- response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
- )
-
-
-class FirestoreAdminServicer(object):
- """The Cloud Firestore Admin API.
-
- This API provides several administrative services for Cloud Firestore.
-
- # Concepts
-
- Project, Database, Namespace, Collection, and Document are used as defined in
- the Google Cloud Firestore API.
-
- Operation: An Operation represents work being performed in the background.
-
-
- # Services
-
- ## Index
-
- The index service manages Cloud Firestore indexes.
-
- Index creation is performed asynchronously.
- An Operation resource is created for each such asynchronous operation.
- The state of the operation (including any errors encountered)
- may be queried via the Operation resource.
-
- ## Metadata
-
- Provides metadata and statistical information about data in Cloud Firestore.
- The data provided as part of this API may be stale.
-
- ## Operation
-
- The Operations collection provides a record of actions performed for the
- specified Project (including any Operations in progress). Operations are not
- created directly but through calls on other collections or resources.
-
- An Operation that is not yet done may be cancelled. The request to cancel is
- asynchronous and the Operation may continue to run for some time after the
- request to cancel is made.
-
- An Operation that is done may be deleted so that it is no longer listed as
- part of the Operation collection.
-
- Operations are created by service `FirestoreAdmin`, but are accessed via
- service `google.longrunning.Operations`.
- """
-
- def CreateIndex(self, request, context):
- """Creates the specified index.
- A newly created index's initial state is `CREATING`. On completion of the
- returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`.
- If the index already exists, the call will return an `ALREADY_EXISTS`
- status.
-
- During creation, the process could result in an error, in which case the
- index will move to the `ERROR` state. The process can be recovered by
- fixing the data that caused the error, removing the index with
- [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with
- [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
-
- Indexes with a single field cannot be created.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ListIndexes(self, request, context):
- """Lists the indexes that match the specified filters.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def GetIndex(self, request, context):
- """Gets an index.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def DeleteIndex(self, request, context):
- """Deletes an index.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
-
-def add_FirestoreAdminServicer_to_server(servicer, server):
- rpc_method_handlers = {
- "CreateIndex": grpc.unary_unary_rpc_method_handler(
- servicer.CreateIndex,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.CreateIndexRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "ListIndexes": grpc.unary_unary_rpc_method_handler(
- servicer.ListIndexes,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesResponse.SerializeToString,
- ),
- "GetIndex": grpc.unary_unary_rpc_method_handler(
- servicer.GetIndex,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.GetIndexRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString,
- ),
- "DeleteIndex": grpc.unary_unary_rpc_method_handler(
- servicer.DeleteIndex,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.DeleteIndexRequest.FromString,
- response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- "google.firestore.admin.v1beta1.FirestoreAdmin", rpc_method_handlers
- )
- server.add_generic_rpc_handlers((generic_handler,))
diff --git a/google/cloud/firestore_v1beta1/proto/admin/index_pb2.py b/google/cloud/firestore_v1beta1/proto/admin/index_pb2.py
deleted file mode 100644
index de43ee88e4..0000000000
--- a/google/cloud/firestore_v1beta1/proto/admin/index_pb2.py
+++ /dev/null
@@ -1,300 +0,0 @@
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1beta1/proto/admin/index.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1beta1/proto/admin/index.proto",
- package="google.firestore.admin.v1beta1",
- syntax="proto3",
- serialized_pb=_b(
- '\n6google/cloud/firestore_v1beta1/proto/admin/index.proto\x12\x1egoogle.firestore.admin.v1beta1\x1a\x1cgoogle/api/annotations.proto"\x9c\x01\n\nIndexField\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12=\n\x04mode\x18\x02 \x01(\x0e\x32/.google.firestore.admin.v1beta1.IndexField.Mode";\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x02\x12\x0e\n\nDESCENDING\x10\x03"\xe8\x01\n\x05Index\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12:\n\x06\x66ields\x18\x03 \x03(\x0b\x32*.google.firestore.admin.v1beta1.IndexField\x12:\n\x05state\x18\x06 \x01(\x0e\x32+.google.firestore.admin.v1beta1.Index.State"B\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x03\x12\t\n\x05READY\x10\x02\x12\t\n\x05\x45RROR\x10\x05\x42\xa5\x01\n"com.google.firestore.admin.v1beta1B\nIndexProtoP\x01ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\xa2\x02\x04GCFS\xaa\x02$Google.Cloud.Firestore.Admin.V1Beta1b\x06proto3'
- ),
- dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR],
-)
-
-
-_INDEXFIELD_MODE = _descriptor.EnumDescriptor(
- name="Mode",
- full_name="google.firestore.admin.v1beta1.IndexField.Mode",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="MODE_UNSPECIFIED", index=0, number=0, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="ASCENDING", index=1, number=2, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="DESCENDING", index=2, number=3, options=None, type=None
- ),
- ],
- containing_type=None,
- options=None,
- serialized_start=218,
- serialized_end=277,
-)
-_sym_db.RegisterEnumDescriptor(_INDEXFIELD_MODE)
-
-_INDEX_STATE = _descriptor.EnumDescriptor(
- name="State",
- full_name="google.firestore.admin.v1beta1.Index.State",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="STATE_UNSPECIFIED", index=0, number=0, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="CREATING", index=1, number=3, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="READY", index=2, number=2, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="ERROR", index=3, number=5, options=None, type=None
- ),
- ],
- containing_type=None,
- options=None,
- serialized_start=446,
- serialized_end=512,
-)
-_sym_db.RegisterEnumDescriptor(_INDEX_STATE)
-
-
-_INDEXFIELD = _descriptor.Descriptor(
- name="IndexField",
- full_name="google.firestore.admin.v1beta1.IndexField",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field_path",
- full_name="google.firestore.admin.v1beta1.IndexField.field_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="mode",
- full_name="google.firestore.admin.v1beta1.IndexField.mode",
- index=1,
- number=2,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_INDEXFIELD_MODE],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=121,
- serialized_end=277,
-)
-
-
-_INDEX = _descriptor.Descriptor(
- name="Index",
- full_name="google.firestore.admin.v1beta1.Index",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.admin.v1beta1.Index.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="collection_id",
- full_name="google.firestore.admin.v1beta1.Index.collection_id",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="google.firestore.admin.v1beta1.Index.fields",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- _descriptor.FieldDescriptor(
- name="state",
- full_name="google.firestore.admin.v1beta1.Index.state",
- index=3,
- number=6,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_INDEX_STATE],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=280,
- serialized_end=512,
-)
-
-_INDEXFIELD.fields_by_name["mode"].enum_type = _INDEXFIELD_MODE
-_INDEXFIELD_MODE.containing_type = _INDEXFIELD
-_INDEX.fields_by_name["fields"].message_type = _INDEXFIELD
-_INDEX.fields_by_name["state"].enum_type = _INDEX_STATE
-_INDEX_STATE.containing_type = _INDEX
-DESCRIPTOR.message_types_by_name["IndexField"] = _INDEXFIELD
-DESCRIPTOR.message_types_by_name["Index"] = _INDEX
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-IndexField = _reflection.GeneratedProtocolMessageType(
- "IndexField",
- (_message.Message,),
- dict(
- DESCRIPTOR=_INDEXFIELD,
- __module__="google.cloud.firestore_v1beta1.proto.admin.index_pb2",
- __doc__="""A field of an index.
-
-
- Attributes:
- field_path:
- The path of the field. Must match the field path specification
- described by
- [google.firestore.v1beta1.Document.fields][fields]. Special
- field path ``__name__`` may be used by itself or at the end of
- a path. ``__type__`` may be used only at the end of path.
- mode:
- The field's mode.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.IndexField)
- ),
-)
-_sym_db.RegisterMessage(IndexField)
-
-Index = _reflection.GeneratedProtocolMessageType(
- "Index",
- (_message.Message,),
- dict(
- DESCRIPTOR=_INDEX,
- __module__="google.cloud.firestore_v1beta1.proto.admin.index_pb2",
- __doc__="""An index definition.
-
-
- Attributes:
- name:
- The resource name of the index.
- collection_id:
- The collection ID to which this index applies. Required.
- fields:
- The fields to index.
- state:
- The state of the index. The state is read-only. @OutputOnly
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.Index)
- ),
-)
-_sym_db.RegisterMessage(Index)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(
- descriptor_pb2.FileOptions(),
- _b(
- '\n"com.google.firestore.admin.v1beta1B\nIndexProtoP\001ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\242\002\004GCFS\252\002$Google.Cloud.Firestore.Admin.V1Beta1'
- ),
-)
-try:
- # THESE ELEMENTS WILL BE DEPRECATED.
- # Please use the generated *_pb2_grpc.py files instead.
- import grpc
- from grpc.beta import implementations as beta_implementations
- from grpc.beta import interfaces as beta_interfaces
- from grpc.framework.common import cardinality
- from grpc.framework.interfaces.face import utilities as face_utilities
-except ImportError:
- pass
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py b/google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_v1beta1/proto/common.proto b/google/cloud/firestore_v1beta1/proto/common.proto
deleted file mode 100644
index 2eaa183470..0000000000
--- a/google/cloud/firestore_v1beta1/proto/common.proto
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.v1beta1;
-
-import "google/protobuf/timestamp.proto";
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
-option java_multiple_files = true;
-option java_outer_classname = "CommonProto";
-option java_package = "com.google.firestore.v1beta1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
-
-// A set of field paths on a document.
-// Used to restrict a get or update operation on a document to a subset of its
-// fields.
-// This is different from standard field masks, as this is always scoped to a
-// [Document][google.firestore.v1beta1.Document], and takes in account the dynamic nature of [Value][google.firestore.v1beta1.Value].
-message DocumentMask {
- // The list of field paths in the mask. See [Document.fields][google.firestore.v1beta1.Document.fields] for a field
- // path syntax reference.
- repeated string field_paths = 1;
-}
-
-// A precondition on a document, used for conditional operations.
-message Precondition {
- // The type of precondition.
- oneof condition_type {
- // When set to `true`, the target document must exist.
- // When set to `false`, the target document must not exist.
- bool exists = 1;
-
- // When set, the target document must exist and have been last updated at
- // that time.
- google.protobuf.Timestamp update_time = 2;
- }
-}
-
-// Options for creating a new transaction.
-message TransactionOptions {
- // Options for a transaction that can be used to read and write documents.
- message ReadWrite {
- // An optional transaction to retry.
- bytes retry_transaction = 1;
- }
-
- // Options for a transaction that can only be used to read documents.
- message ReadOnly {
- // The consistency mode for this transaction. If not set, defaults to strong
- // consistency.
- oneof consistency_selector {
- // Reads documents at the given time.
- // This may not be older than 60 seconds.
- google.protobuf.Timestamp read_time = 2;
- }
- }
-
- // The mode of the transaction.
- oneof mode {
- // The transaction can only be used for read operations.
- ReadOnly read_only = 2;
-
- // The transaction can be used for both read and write operations.
- ReadWrite read_write = 3;
- }
-}
diff --git a/google/cloud/firestore_v1beta1/proto/common_pb2.py b/google/cloud/firestore_v1beta1/proto/common_pb2.py
deleted file mode 100644
index 8469940a4c..0000000000
--- a/google/cloud/firestore_v1beta1/proto/common_pb2.py
+++ /dev/null
@@ -1,454 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1beta1/proto/common.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1beta1/proto/common.proto",
- package="google.firestore.v1beta1",
- syntax="proto3",
- serialized_options=_b(
- "\n\034com.google.firestore.v1beta1B\013CommonProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1"
- ),
- serialized_pb=_b(
- '\n1google/cloud/firestore_v1beta1/proto/common.proto\x12\x18google.firestore.v1beta1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"#\n\x0c\x44ocumentMask\x12\x13\n\x0b\x66ield_paths\x18\x01 \x03(\t"e\n\x0cPrecondition\x12\x10\n\x06\x65xists\x18\x01 \x01(\x08H\x00\x12\x31\n\x0bupdate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x10\n\x0e\x63ondition_type"\xb3\x02\n\x12TransactionOptions\x12J\n\tread_only\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.TransactionOptions.ReadOnlyH\x00\x12L\n\nread_write\x18\x03 \x01(\x0b\x32\x36.google.firestore.v1beta1.TransactionOptions.ReadWriteH\x00\x1a&\n\tReadWrite\x12\x19\n\x11retry_transaction\x18\x01 \x01(\x0c\x1aS\n\x08ReadOnly\x12/\n\tread_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selectorB\x06\n\x04modeB\xb9\x01\n\x1c\x63om.google.firestore.v1beta1B\x0b\x43ommonProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3'
- ),
- dependencies=[
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- ],
-)
-
-
-_DOCUMENTMASK = _descriptor.Descriptor(
- name="DocumentMask",
- full_name="google.firestore.v1beta1.DocumentMask",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field_paths",
- full_name="google.firestore.v1beta1.DocumentMask.field_paths",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=142,
- serialized_end=177,
-)
-
-
-_PRECONDITION = _descriptor.Descriptor(
- name="Precondition",
- full_name="google.firestore.v1beta1.Precondition",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="exists",
- full_name="google.firestore.v1beta1.Precondition.exists",
- index=0,
- number=1,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update_time",
- full_name="google.firestore.v1beta1.Precondition.update_time",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="condition_type",
- full_name="google.firestore.v1beta1.Precondition.condition_type",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=179,
- serialized_end=280,
-)
-
-
-_TRANSACTIONOPTIONS_READWRITE = _descriptor.Descriptor(
- name="ReadWrite",
- full_name="google.firestore.v1beta1.TransactionOptions.ReadWrite",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="retry_transaction",
- full_name="google.firestore.v1beta1.TransactionOptions.ReadWrite.retry_transaction",
- index=0,
- number=1,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=459,
- serialized_end=497,
-)
-
-_TRANSACTIONOPTIONS_READONLY = _descriptor.Descriptor(
- name="ReadOnly",
- full_name="google.firestore.v1beta1.TransactionOptions.ReadOnly",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.TransactionOptions.ReadOnly.read_time",
- index=0,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="consistency_selector",
- full_name="google.firestore.v1beta1.TransactionOptions.ReadOnly.consistency_selector",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=499,
- serialized_end=582,
-)
-
-_TRANSACTIONOPTIONS = _descriptor.Descriptor(
- name="TransactionOptions",
- full_name="google.firestore.v1beta1.TransactionOptions",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="read_only",
- full_name="google.firestore.v1beta1.TransactionOptions.read_only",
- index=0,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_write",
- full_name="google.firestore.v1beta1.TransactionOptions.read_write",
- index=1,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_TRANSACTIONOPTIONS_READWRITE, _TRANSACTIONOPTIONS_READONLY],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="mode",
- full_name="google.firestore.v1beta1.TransactionOptions.mode",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=283,
- serialized_end=590,
-)
-
-_PRECONDITION.fields_by_name[
- "update_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_PRECONDITION.oneofs_by_name["condition_type"].fields.append(
- _PRECONDITION.fields_by_name["exists"]
-)
-_PRECONDITION.fields_by_name["exists"].containing_oneof = _PRECONDITION.oneofs_by_name[
- "condition_type"
-]
-_PRECONDITION.oneofs_by_name["condition_type"].fields.append(
- _PRECONDITION.fields_by_name["update_time"]
-)
-_PRECONDITION.fields_by_name[
- "update_time"
-].containing_oneof = _PRECONDITION.oneofs_by_name["condition_type"]
-_TRANSACTIONOPTIONS_READWRITE.containing_type = _TRANSACTIONOPTIONS
-_TRANSACTIONOPTIONS_READONLY.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_TRANSACTIONOPTIONS_READONLY.containing_type = _TRANSACTIONOPTIONS
-_TRANSACTIONOPTIONS_READONLY.oneofs_by_name["consistency_selector"].fields.append(
- _TRANSACTIONOPTIONS_READONLY.fields_by_name["read_time"]
-)
-_TRANSACTIONOPTIONS_READONLY.fields_by_name[
- "read_time"
-].containing_oneof = _TRANSACTIONOPTIONS_READONLY.oneofs_by_name["consistency_selector"]
-_TRANSACTIONOPTIONS.fields_by_name[
- "read_only"
-].message_type = _TRANSACTIONOPTIONS_READONLY
-_TRANSACTIONOPTIONS.fields_by_name[
- "read_write"
-].message_type = _TRANSACTIONOPTIONS_READWRITE
-_TRANSACTIONOPTIONS.oneofs_by_name["mode"].fields.append(
- _TRANSACTIONOPTIONS.fields_by_name["read_only"]
-)
-_TRANSACTIONOPTIONS.fields_by_name[
- "read_only"
-].containing_oneof = _TRANSACTIONOPTIONS.oneofs_by_name["mode"]
-_TRANSACTIONOPTIONS.oneofs_by_name["mode"].fields.append(
- _TRANSACTIONOPTIONS.fields_by_name["read_write"]
-)
-_TRANSACTIONOPTIONS.fields_by_name[
- "read_write"
-].containing_oneof = _TRANSACTIONOPTIONS.oneofs_by_name["mode"]
-DESCRIPTOR.message_types_by_name["DocumentMask"] = _DOCUMENTMASK
-DESCRIPTOR.message_types_by_name["Precondition"] = _PRECONDITION
-DESCRIPTOR.message_types_by_name["TransactionOptions"] = _TRANSACTIONOPTIONS
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-DocumentMask = _reflection.GeneratedProtocolMessageType(
- "DocumentMask",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCUMENTMASK,
- __module__="google.cloud.firestore_v1beta1.proto.common_pb2",
- __doc__="""A set of field paths on a document. Used to restrict a get
- or update operation on a document to a subset of its fields. This is
- different from standard field masks, as this is always scoped to a
- [Document][google.firestore.v1beta1.Document], and takes in account the
- dynamic nature of [Value][google.firestore.v1beta1.Value].
-
-
- Attributes:
- field_paths:
- The list of field paths in the mask. See
- [Document.fields][google.firestore.v1beta1.Document.fields]
- for a field path syntax reference.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentMask)
- ),
-)
-_sym_db.RegisterMessage(DocumentMask)
-
-Precondition = _reflection.GeneratedProtocolMessageType(
- "Precondition",
- (_message.Message,),
- dict(
- DESCRIPTOR=_PRECONDITION,
- __module__="google.cloud.firestore_v1beta1.proto.common_pb2",
- __doc__="""A precondition on a document, used for conditional
- operations.
-
-
- Attributes:
- condition_type:
- The type of precondition.
- exists:
- When set to ``true``, the target document must exist. When set
- to ``false``, the target document must not exist.
- update_time:
- When set, the target document must exist and have been last
- updated at that time.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Precondition)
- ),
-)
-_sym_db.RegisterMessage(Precondition)
-
-TransactionOptions = _reflection.GeneratedProtocolMessageType(
- "TransactionOptions",
- (_message.Message,),
- dict(
- ReadWrite=_reflection.GeneratedProtocolMessageType(
- "ReadWrite",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TRANSACTIONOPTIONS_READWRITE,
- __module__="google.cloud.firestore_v1beta1.proto.common_pb2",
- __doc__="""Options for a transaction that can be used to read and
- write documents.
-
-
- Attributes:
- retry_transaction:
- An optional transaction to retry.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions.ReadWrite)
- ),
- ),
- ReadOnly=_reflection.GeneratedProtocolMessageType(
- "ReadOnly",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TRANSACTIONOPTIONS_READONLY,
- __module__="google.cloud.firestore_v1beta1.proto.common_pb2",
- __doc__="""Options for a transaction that can only be used to read
- documents.
-
-
- Attributes:
- consistency_selector:
- The consistency mode for this transaction. If not set,
- defaults to strong consistency.
- read_time:
- Reads documents at the given time. This may not be older than
- 60 seconds.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions.ReadOnly)
- ),
- ),
- DESCRIPTOR=_TRANSACTIONOPTIONS,
- __module__="google.cloud.firestore_v1beta1.proto.common_pb2",
- __doc__="""Options for creating a new transaction.
-
-
- Attributes:
- mode:
- The mode of the transaction.
- read_only:
- The transaction can only be used for read operations.
- read_write:
- The transaction can be used for both read and write
- operations.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions)
- ),
-)
-_sym_db.RegisterMessage(TransactionOptions)
-_sym_db.RegisterMessage(TransactionOptions.ReadWrite)
-_sym_db.RegisterMessage(TransactionOptions.ReadOnly)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py b/google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_v1beta1/proto/document.proto b/google/cloud/firestore_v1beta1/proto/document.proto
deleted file mode 100644
index 7caae4688a..0000000000
--- a/google/cloud/firestore_v1beta1/proto/document.proto
+++ /dev/null
@@ -1,150 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.v1beta1;
-
-import "google/protobuf/struct.proto";
-import "google/protobuf/timestamp.proto";
-import "google/type/latlng.proto";
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
-option java_multiple_files = true;
-option java_outer_classname = "DocumentProto";
-option java_package = "com.google.firestore.v1beta1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
-
-// A Firestore document.
-//
-// Must not exceed 1 MiB - 4 bytes.
-message Document {
- // The resource name of the document, for example
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- string name = 1;
-
- // The document's fields.
- //
- // The map keys represent field names.
- //
- // A simple field name contains only characters `a` to `z`, `A` to `Z`,
- // `0` to `9`, or `_`, and must not start with `0` to `9`. For example,
- // `foo_bar_17`.
- //
- // Field names matching the regular expression `__.*__` are reserved. Reserved
- // field names are forbidden except in certain documented contexts. The map
- // keys, represented as UTF-8, must not exceed 1,500 bytes and cannot be
- // empty.
- //
- // Field paths may be used in other contexts to refer to structured fields
- // defined here. For `map_value`, the field path is represented by the simple
- // or quoted field names of the containing fields, delimited by `.`. For
- // example, the structured field
- // `"foo" : { map_value: { "x&y" : { string_value: "hello" }}}` would be
- // represented by the field path `foo.x&y`.
- //
- // Within a field path, a quoted field name starts and ends with `` ` `` and
- // may contain any character. Some characters, including `` ` ``, must be
- // escaped using a `\`. For example, `` `x&y` `` represents `x&y` and
- // `` `bak\`tik` `` represents `` bak`tik ``.
- map fields = 2;
-
- // Output only. The time at which the document was created.
- //
- // This value increases monotonically when a document is deleted then
- // recreated. It can also be compared to values from other documents and
- // the `read_time` of a query.
- google.protobuf.Timestamp create_time = 3;
-
- // Output only. The time at which the document was last changed.
- //
- // This value is initially set to the `create_time` then increases
- // monotonically with each change to the document. It can also be
- // compared to values from other documents and the `read_time` of a query.
- google.protobuf.Timestamp update_time = 4;
-}
-
-// A message that can hold any of the supported value types.
-message Value {
- // Must have a value set.
- oneof value_type {
- // A null value.
- google.protobuf.NullValue null_value = 11;
-
- // A boolean value.
- bool boolean_value = 1;
-
- // An integer value.
- int64 integer_value = 2;
-
- // A double value.
- double double_value = 3;
-
- // A timestamp value.
- //
- // Precise only to microseconds. When stored, any additional precision is
- // rounded down.
- google.protobuf.Timestamp timestamp_value = 10;
-
- // A string value.
- //
- // The string, represented as UTF-8, must not exceed 1 MiB - 89 bytes.
- // Only the first 1,500 bytes of the UTF-8 representation are considered by
- // queries.
- string string_value = 17;
-
- // A bytes value.
- //
- // Must not exceed 1 MiB - 89 bytes.
- // Only the first 1,500 bytes are considered by queries.
- bytes bytes_value = 18;
-
- // A reference to a document. For example:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- string reference_value = 5;
-
- // A geo point value representing a point on the surface of Earth.
- google.type.LatLng geo_point_value = 8;
-
- // An array value.
- //
- // Cannot directly contain another array value, though can contain an
- // map which contains another array.
- ArrayValue array_value = 9;
-
- // A map value.
- MapValue map_value = 6;
- }
-}
-
-// An array value.
-message ArrayValue {
- // Values in the array.
- repeated Value values = 1;
-}
-
-// A map value.
-message MapValue {
- // The map's fields.
- //
- // The map keys represent field names. Field names matching the regular
- // expression `__.*__` are reserved. Reserved field names are forbidden except
- // in certain documented contexts. The map keys, represented as UTF-8, must
- // not exceed 1,500 bytes and cannot be empty.
- map fields = 1;
-}
diff --git a/google/cloud/firestore_v1beta1/proto/document_pb2.py b/google/cloud/firestore_v1beta1/proto/document_pb2.py
deleted file mode 100644
index 4ca1f65ed7..0000000000
--- a/google/cloud/firestore_v1beta1/proto/document_pb2.py
+++ /dev/null
@@ -1,798 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1beta1/proto/document.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1beta1/proto/document.proto",
- package="google.firestore.v1beta1",
- syntax="proto3",
- serialized_options=_b(
- "\n\034com.google.firestore.v1beta1B\rDocumentProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1"
- ),
- serialized_pb=_b(
- '\n3google/cloud/firestore_v1beta1/proto/document.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\x1a\x1cgoogle/api/annotations.proto"\x8a\x02\n\x08\x44ocument\x12\x0c\n\x04name\x18\x01 \x01(\t\x12>\n\x06\x66ields\x18\x02 \x03(\x0b\x32..google.firestore.v1beta1.Document.FieldsEntry\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01"\xb8\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x12 \x01(\x0cH\x00\x12\x19\n\x0freference_value\x18\x05 \x01(\tH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12;\n\x0b\x61rray_value\x18\t \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x37\n\tmap_value\x18\x06 \x01(\x0b\x32".google.firestore.v1beta1.MapValueH\x00\x42\x0c\n\nvalue_type"=\n\nArrayValue\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value"\x9a\x01\n\x08MapValue\x12>\n\x06\x66ields\x18\x01 \x03(\x0b\x32..google.firestore.v1beta1.MapValue.FieldsEntry\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 
\x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01\x42\xbb\x01\n\x1c\x63om.google.firestore.v1beta1B\rDocumentProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3'
- ),
- dependencies=[
- google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- google_dot_type_dot_latlng__pb2.DESCRIPTOR,
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- ],
-)
-
-
-_DOCUMENT_FIELDSENTRY = _descriptor.Descriptor(
- name="FieldsEntry",
- full_name="google.firestore.v1beta1.Document.FieldsEntry",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="key",
- full_name="google.firestore.v1beta1.Document.FieldsEntry.key",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="value",
- full_name="google.firestore.v1beta1.Document.FieldsEntry.value",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=_b("8\001"),
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=389,
- serialized_end=467,
-)
-
-_DOCUMENT = _descriptor.Descriptor(
- name="Document",
- full_name="google.firestore.v1beta1.Document",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.v1beta1.Document.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="google.firestore.v1beta1.Document.fields",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="create_time",
- full_name="google.firestore.v1beta1.Document.create_time",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update_time",
- full_name="google.firestore.v1beta1.Document.update_time",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_DOCUMENT_FIELDSENTRY],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=201,
- serialized_end=467,
-)
-
-
-_VALUE = _descriptor.Descriptor(
- name="Value",
- full_name="google.firestore.v1beta1.Value",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="null_value",
- full_name="google.firestore.v1beta1.Value.null_value",
- index=0,
- number=11,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="boolean_value",
- full_name="google.firestore.v1beta1.Value.boolean_value",
- index=1,
- number=1,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="integer_value",
- full_name="google.firestore.v1beta1.Value.integer_value",
- index=2,
- number=2,
- type=3,
- cpp_type=2,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="double_value",
- full_name="google.firestore.v1beta1.Value.double_value",
- index=3,
- number=3,
- type=1,
- cpp_type=5,
- label=1,
- has_default_value=False,
- default_value=float(0),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="timestamp_value",
- full_name="google.firestore.v1beta1.Value.timestamp_value",
- index=4,
- number=10,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="string_value",
- full_name="google.firestore.v1beta1.Value.string_value",
- index=5,
- number=17,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="bytes_value",
- full_name="google.firestore.v1beta1.Value.bytes_value",
- index=6,
- number=18,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="reference_value",
- full_name="google.firestore.v1beta1.Value.reference_value",
- index=7,
- number=5,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="geo_point_value",
- full_name="google.firestore.v1beta1.Value.geo_point_value",
- index=8,
- number=8,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="array_value",
- full_name="google.firestore.v1beta1.Value.array_value",
- index=9,
- number=9,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="map_value",
- full_name="google.firestore.v1beta1.Value.map_value",
- index=10,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="value_type",
- full_name="google.firestore.v1beta1.Value.value_type",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=470,
- serialized_end=910,
-)
-
-
-_ARRAYVALUE = _descriptor.Descriptor(
- name="ArrayValue",
- full_name="google.firestore.v1beta1.ArrayValue",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="values",
- full_name="google.firestore.v1beta1.ArrayValue.values",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=912,
- serialized_end=973,
-)
-
-
-_MAPVALUE_FIELDSENTRY = _descriptor.Descriptor(
- name="FieldsEntry",
- full_name="google.firestore.v1beta1.MapValue.FieldsEntry",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="key",
- full_name="google.firestore.v1beta1.MapValue.FieldsEntry.key",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="value",
- full_name="google.firestore.v1beta1.MapValue.FieldsEntry.value",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=_b("8\001"),
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=389,
- serialized_end=467,
-)
-
-_MAPVALUE = _descriptor.Descriptor(
- name="MapValue",
- full_name="google.firestore.v1beta1.MapValue",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="google.firestore.v1beta1.MapValue.fields",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[_MAPVALUE_FIELDSENTRY],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=976,
- serialized_end=1130,
-)
-
-_DOCUMENT_FIELDSENTRY.fields_by_name["value"].message_type = _VALUE
-_DOCUMENT_FIELDSENTRY.containing_type = _DOCUMENT
-_DOCUMENT.fields_by_name["fields"].message_type = _DOCUMENT_FIELDSENTRY
-_DOCUMENT.fields_by_name[
- "create_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_DOCUMENT.fields_by_name[
- "update_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_VALUE.fields_by_name[
- "null_value"
-].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE
-_VALUE.fields_by_name[
- "timestamp_value"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_VALUE.fields_by_name[
- "geo_point_value"
-].message_type = google_dot_type_dot_latlng__pb2._LATLNG
-_VALUE.fields_by_name["array_value"].message_type = _ARRAYVALUE
-_VALUE.fields_by_name["map_value"].message_type = _MAPVALUE
-_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["null_value"])
-_VALUE.fields_by_name["null_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_VALUE.oneofs_by_name["value_type"].fields.append(
- _VALUE.fields_by_name["boolean_value"]
-)
-_VALUE.fields_by_name["boolean_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_VALUE.oneofs_by_name["value_type"].fields.append(
- _VALUE.fields_by_name["integer_value"]
-)
-_VALUE.fields_by_name["integer_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["double_value"])
-_VALUE.fields_by_name["double_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_VALUE.oneofs_by_name["value_type"].fields.append(
- _VALUE.fields_by_name["timestamp_value"]
-)
-_VALUE.fields_by_name["timestamp_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["string_value"])
-_VALUE.fields_by_name["string_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["bytes_value"])
-_VALUE.fields_by_name["bytes_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_VALUE.oneofs_by_name["value_type"].fields.append(
- _VALUE.fields_by_name["reference_value"]
-)
-_VALUE.fields_by_name["reference_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_VALUE.oneofs_by_name["value_type"].fields.append(
- _VALUE.fields_by_name["geo_point_value"]
-)
-_VALUE.fields_by_name["geo_point_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["array_value"])
-_VALUE.fields_by_name["array_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["map_value"])
-_VALUE.fields_by_name["map_value"].containing_oneof = _VALUE.oneofs_by_name[
- "value_type"
-]
-_ARRAYVALUE.fields_by_name["values"].message_type = _VALUE
-_MAPVALUE_FIELDSENTRY.fields_by_name["value"].message_type = _VALUE
-_MAPVALUE_FIELDSENTRY.containing_type = _MAPVALUE
-_MAPVALUE.fields_by_name["fields"].message_type = _MAPVALUE_FIELDSENTRY
-DESCRIPTOR.message_types_by_name["Document"] = _DOCUMENT
-DESCRIPTOR.message_types_by_name["Value"] = _VALUE
-DESCRIPTOR.message_types_by_name["ArrayValue"] = _ARRAYVALUE
-DESCRIPTOR.message_types_by_name["MapValue"] = _MAPVALUE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-Document = _reflection.GeneratedProtocolMessageType(
- "Document",
- (_message.Message,),
- dict(
- FieldsEntry=_reflection.GeneratedProtocolMessageType(
- "FieldsEntry",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCUMENT_FIELDSENTRY,
- __module__="google.cloud.firestore_v1beta1.proto.document_pb2"
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Document.FieldsEntry)
- ),
- ),
- DESCRIPTOR=_DOCUMENT,
- __module__="google.cloud.firestore_v1beta1.proto.document_pb2",
- __doc__="""A Firestore document.
-
- Must not exceed 1 MiB - 4 bytes.
-
-
- Attributes:
- name:
- The resource name of the document, for example ``projects/{pro
- ject_id}/databases/{database_id}/documents/{document_path}``.
- fields:
- The document's fields. The map keys represent field names. A
- simple field name contains only characters ``a`` to ``z``,
- ``A`` to ``Z``, ``0`` to ``9``, or ``_``, and must not start
- with ``0`` to ``9``. For example, ``foo_bar_17``. Field names
- matching the regular expression ``__.*__`` are reserved.
- Reserved field names are forbidden except in certain
- documented contexts. The map keys, represented as UTF-8, must
- not exceed 1,500 bytes and cannot be empty. Field paths may
- be used in other contexts to refer to structured fields
- defined here. For ``map_value``, the field path is represented
- by the simple or quoted field names of the containing fields,
- delimited by ``.``. For example, the structured field ``"foo"
- : { map_value: { "x&y" : { string_value: "hello" }}}`` would
- be represented by the field path ``foo.x&y``. Within a field
- path, a quoted field name starts and ends with ````` and may
- contain any character. Some characters, including `````, must
- be escaped using a ``\``. For example, ```x&y``` represents
- ``x&y`` and ```bak\`tik``` represents ``bak`tik``.
- create_time:
- Output only. The time at which the document was created. This
- value increases monotonically when a document is deleted then
- recreated. It can also be compared to values from other
- documents and the ``read_time`` of a query.
- update_time:
- Output only. The time at which the document was last changed.
- This value is initially set to the ``create_time`` then
- increases monotonically with each change to the document. It
- can also be compared to values from other documents and the
- ``read_time`` of a query.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Document)
- ),
-)
-_sym_db.RegisterMessage(Document)
-_sym_db.RegisterMessage(Document.FieldsEntry)
-
-Value = _reflection.GeneratedProtocolMessageType(
- "Value",
- (_message.Message,),
- dict(
- DESCRIPTOR=_VALUE,
- __module__="google.cloud.firestore_v1beta1.proto.document_pb2",
- __doc__="""A message that can hold any of the supported value types.
-
-
- Attributes:
- value_type:
- Must have a value set.
- null_value:
- A null value.
- boolean_value:
- A boolean value.
- integer_value:
- An integer value.
- double_value:
- A double value.
- timestamp_value:
- A timestamp value. Precise only to microseconds. When stored,
- any additional precision is rounded down.
- string_value:
- A string value. The string, represented as UTF-8, must not
- exceed 1 MiB - 89 bytes. Only the first 1,500 bytes of the
- UTF-8 representation are considered by queries.
- bytes_value:
- A bytes value. Must not exceed 1 MiB - 89 bytes. Only the
- first 1,500 bytes are considered by queries.
- reference_value:
- A reference to a document. For example: ``projects/{project_id
- }/databases/{database_id}/documents/{document_path}``.
- geo_point_value:
- A geo point value representing a point on the surface of
- Earth.
- array_value:
- An array value. Cannot directly contain another array value,
- though can contain an map which contains another array.
- map_value:
- A map value.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Value)
- ),
-)
-_sym_db.RegisterMessage(Value)
-
-ArrayValue = _reflection.GeneratedProtocolMessageType(
- "ArrayValue",
- (_message.Message,),
- dict(
- DESCRIPTOR=_ARRAYVALUE,
- __module__="google.cloud.firestore_v1beta1.proto.document_pb2",
- __doc__="""An array value.
-
-
- Attributes:
- values:
- Values in the array.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ArrayValue)
- ),
-)
-_sym_db.RegisterMessage(ArrayValue)
-
-MapValue = _reflection.GeneratedProtocolMessageType(
- "MapValue",
- (_message.Message,),
- dict(
- FieldsEntry=_reflection.GeneratedProtocolMessageType(
- "FieldsEntry",
- (_message.Message,),
- dict(
- DESCRIPTOR=_MAPVALUE_FIELDSENTRY,
- __module__="google.cloud.firestore_v1beta1.proto.document_pb2"
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.MapValue.FieldsEntry)
- ),
- ),
- DESCRIPTOR=_MAPVALUE,
- __module__="google.cloud.firestore_v1beta1.proto.document_pb2",
- __doc__="""A map value.
-
-
- Attributes:
- fields:
- The map's fields. The map keys represent field names. Field
- names matching the regular expression ``__.*__`` are reserved.
- Reserved field names are forbidden except in certain
- documented contexts. The map keys, represented as UTF-8, must
- not exceed 1,500 bytes and cannot be empty.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.MapValue)
- ),
-)
-_sym_db.RegisterMessage(MapValue)
-_sym_db.RegisterMessage(MapValue.FieldsEntry)
-
-
-DESCRIPTOR._options = None
-_DOCUMENT_FIELDSENTRY._options = None
-_MAPVALUE_FIELDSENTRY._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py b/google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py b/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py
deleted file mode 100644
index 957acef269..0000000000
--- a/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1beta1/proto/event_flow_document_change.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-from google.cloud.firestore_v1beta1.proto import (
- common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2,
-)
-from google.cloud.firestore_v1beta1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
-)
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1beta1/proto/event_flow_document_change.proto",
- package="google.firestore.v1beta1",
- syntax="proto3",
- serialized_pb=_b(
- "\nEgoogle/cloud/firestore_v1beta1/proto/event_flow_document_change.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.protoB\xa2\x01\n\x1c\x63om.google.firestore.v1beta1B\x1c\x45ventFlowDocumentChangeProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1b\x06proto3"
- ),
- dependencies=[
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
- ],
-)
-
-
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(
- descriptor_pb2.FileOptions(),
- _b(
- "\n\034com.google.firestore.v1beta1B\034EventFlowDocumentChangeProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\252\002\036Google.Cloud.Firestore.V1Beta1"
- ),
-)
-try:
- # THESE ELEMENTS WILL BE DEPRECATED.
- # Please use the generated *_pb2_grpc.py files instead.
- import grpc
- from grpc.beta import implementations as beta_implementations
- from grpc.beta import interfaces as beta_interfaces
- from grpc.framework.common import cardinality
- from grpc.framework.interfaces.face import utilities as face_utilities
-except ImportError:
- pass
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py b/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_v1beta1/proto/field.proto b/google/cloud/firestore_v1beta1/proto/field.proto
deleted file mode 100644
index 9d1534eb1f..0000000000
--- a/google/cloud/firestore_v1beta1/proto/field.proto
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright 2018 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.admin.v1beta2;
-
-import "google/api/annotations.proto";
-import "google/firestore/admin/v1beta2/index.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.Admin.V1Beta2";
-option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1beta2;admin";
-option java_multiple_files = true;
-option java_outer_classname = "FieldProto";
-option java_package = "com.google.firestore.admin.v1beta2";
-option objc_class_prefix = "GCFS";
-
-
-// Represents a single field in the database.
-//
-// Fields are grouped by their "Collection Group", which represent all
-// collections in the database with the same id.
-message Field {
- // The index configuration for this field.
- message IndexConfig {
- // The indexes supported for this field.
- repeated Index indexes = 1;
-
- // Output only.
- // When true, the `Field`'s index configuration is set from the
- // configuration specified by the `ancestor_field`.
- // When false, the `Field`'s index configuration is defined explicitly.
- bool uses_ancestor_config = 2;
-
- // Output only.
- // Specifies the resource name of the `Field` from which this field's
- // index configuration is set (when `uses_ancestor_config` is true),
- // or from which it *would* be set if this field had no index configuration
- // (when `uses_ancestor_config` is false).
- string ancestor_field = 3;
-
- // Output only
- // When true, the `Field`'s index configuration is in the process of being
- // reverted. Once complete, the index config will transition to the same
- // state as the field specified by `ancestor_field`, at which point
- // `uses_ancestor_config` will be `true` and `reverting` will be `false`.
- bool reverting = 4;
- }
-
- // A field name of the form
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
- //
- // A field path may be a simple field name, e.g. `address` or a path to fields
- // within map_value , e.g. `address.city`,
- // or a special field path. The only valid special field is `*`, which
- // represents any field.
- //
- // Field paths may be quoted using ` (backtick). The only character that needs
- // to be escaped within a quoted field path is the backtick character itself,
- // escaped using a backslash. Special characters in field paths that
- // must be quoted include: `*`, `.`,
- // ``` (backtick), `[`, `]`, as well as any ascii symbolic characters.
- //
- // Examples:
- // (Note: Comments here are written in markdown syntax, so there is an
- // additional layer of backticks to represent a code block)
- // `\`address.city\`` represents a field named `address.city`, not the map key
- // `city` in the field `address`.
- // `\`*\`` represents a field named `*`, not any field.
- //
- // A special `Field` contains the default indexing settings for all fields.
- // This field's resource name is:
- // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`
- // Indexes defined on this `Field` will be applied to all fields which do not
- // have their own `Field` index configuration.
- string name = 1;
-
- // The index configuration for this field. If unset, field indexing will
- // revert to the configuration defined by the `ancestor_field`. To
- // explicitly remove all indexes for this field, specify an index config
- // with an empty list of indexes.
- IndexConfig index_config = 2;
-}
diff --git a/google/cloud/firestore_v1beta1/proto/firestore.proto b/google/cloud/firestore_v1beta1/proto/firestore.proto
deleted file mode 100644
index c2b15b0487..0000000000
--- a/google/cloud/firestore_v1beta1/proto/firestore.proto
+++ /dev/null
@@ -1,765 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.v1beta1;
-
-import "google/api/annotations.proto";
-import "google/api/client.proto";
-import "google/api/field_behavior.proto";
-import "google/firestore/v1beta1/common.proto";
-import "google/firestore/v1beta1/document.proto";
-import "google/firestore/v1beta1/query.proto";
-import "google/firestore/v1beta1/write.proto";
-import "google/protobuf/empty.proto";
-import "google/protobuf/timestamp.proto";
-import "google/rpc/status.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
-option java_multiple_files = true;
-option java_outer_classname = "FirestoreProto";
-option java_package = "com.google.firestore.v1beta1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
-
-// Specification of the Firestore API.
-
-// The Cloud Firestore service.
-//
-// This service exposes several types of comparable timestamps:
-//
-// * `create_time` - The time at which a document was created. Changes only
-// when a document is deleted, then re-created. Increases in a strict
-// monotonic fashion.
-// * `update_time` - The time at which a document was last updated. Changes
-// every time a document is modified. Does not change when a write results
-// in no modifications. Increases in a strict monotonic fashion.
-// * `read_time` - The time at which a particular state was observed. Used
-// to denote a consistent snapshot of the database or the time at which a
-// Document was observed to not exist.
-// * `commit_time` - The time at which the writes in a transaction were
-// committed. Any read with an equal or greater `read_time` is guaranteed
-// to see the effects of the transaction.
-service Firestore {
- option (google.api.default_host) = "firestore.googleapis.com";
- option (google.api.oauth_scopes) =
- "https://www.googleapis.com/auth/cloud-platform,"
- "https://www.googleapis.com/auth/datastore";
-
- // Gets a single document.
- rpc GetDocument(GetDocumentRequest) returns (Document) {
- option (google.api.http) = {
- get: "/v1beta1/{name=projects/*/databases/*/documents/*/**}"
- };
- }
-
- // Lists documents.
- rpc ListDocuments(ListDocumentsRequest) returns (ListDocumentsResponse) {
- option (google.api.http) = {
- get: "/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}"
- };
- }
-
- // Creates a new document.
- rpc CreateDocument(CreateDocumentRequest) returns (Document) {
- option (google.api.http) = {
- post: "/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}"
- body: "document"
- };
- }
-
- // Updates or inserts a document.
- rpc UpdateDocument(UpdateDocumentRequest) returns (Document) {
- option (google.api.http) = {
- patch: "/v1beta1/{document.name=projects/*/databases/*/documents/*/**}"
- body: "document"
- };
- option (google.api.method_signature) = "document,update_mask";
- }
-
- // Deletes a document.
- rpc DeleteDocument(DeleteDocumentRequest) returns (google.protobuf.Empty) {
- option (google.api.http) = {
- delete: "/v1beta1/{name=projects/*/databases/*/documents/*/**}"
- };
- option (google.api.method_signature) = "name";
- }
-
- // Gets multiple documents.
- //
- // Documents returned by this method are not guaranteed to be returned in the
- // same order that they were requested.
- rpc BatchGetDocuments(BatchGetDocumentsRequest) returns (stream BatchGetDocumentsResponse) {
- option (google.api.http) = {
- post: "/v1beta1/{database=projects/*/databases/*}/documents:batchGet"
- body: "*"
- };
- }
-
- // Starts a new transaction.
- rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) {
- option (google.api.http) = {
- post: "/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction"
- body: "*"
- };
- option (google.api.method_signature) = "database";
- }
-
- // Commits a transaction, while optionally updating documents.
- rpc Commit(CommitRequest) returns (CommitResponse) {
- option (google.api.http) = {
- post: "/v1beta1/{database=projects/*/databases/*}/documents:commit"
- body: "*"
- };
- option (google.api.method_signature) = "database,writes";
- }
-
- // Rolls back a transaction.
- rpc Rollback(RollbackRequest) returns (google.protobuf.Empty) {
- option (google.api.http) = {
- post: "/v1beta1/{database=projects/*/databases/*}/documents:rollback"
- body: "*"
- };
- option (google.api.method_signature) = "database,transaction";
- }
-
- // Runs a query.
- rpc RunQuery(RunQueryRequest) returns (stream RunQueryResponse) {
- option (google.api.http) = {
- post: "/v1beta1/{parent=projects/*/databases/*/documents}:runQuery"
- body: "*"
- additional_bindings {
- post: "/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery"
- body: "*"
- }
- };
- }
-
- // Streams batches of document updates and deletes, in order.
- rpc Write(stream WriteRequest) returns (stream WriteResponse) {
- option (google.api.http) = {
- post: "/v1beta1/{database=projects/*/databases/*}/documents:write"
- body: "*"
- };
- }
-
- // Listens to changes.
- rpc Listen(stream ListenRequest) returns (stream ListenResponse) {
- option (google.api.http) = {
- post: "/v1beta1/{database=projects/*/databases/*}/documents:listen"
- body: "*"
- };
- }
-
- // Lists all the collection IDs underneath a document.
- rpc ListCollectionIds(ListCollectionIdsRequest) returns (ListCollectionIdsResponse) {
- option (google.api.http) = {
- post: "/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds"
- body: "*"
- additional_bindings {
- post: "/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds"
- body: "*"
- }
- };
- option (google.api.method_signature) = "parent";
- }
-}
-
-// The request for [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument].
-message GetDocumentRequest {
- // Required. The resource name of the Document to get. In the format:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- string name = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The fields to return. If not set, returns all fields.
- //
- // If the document has a field that is not present in this mask, that field
- // will not be returned in the response.
- DocumentMask mask = 2;
-
- // The consistency mode for this transaction.
- // If not set, defaults to strong consistency.
- oneof consistency_selector {
- // Reads the document in a transaction.
- bytes transaction = 3;
-
- // Reads the version of the document at the given time.
- // This may not be older than 60 seconds.
- google.protobuf.Timestamp read_time = 5;
- }
-}
-
-// The request for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments].
-message ListDocumentsRequest {
- // Required. The parent resource name. In the format:
- // `projects/{project_id}/databases/{database_id}/documents` or
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- // For example:
- // `projects/my-project/databases/my-database/documents` or
- // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
- string parent = 1 [(google.api.field_behavior) = REQUIRED];
-
- // Required. The collection ID, relative to `parent`, to list. For example: `chatrooms`
- // or `messages`.
- string collection_id = 2 [(google.api.field_behavior) = REQUIRED];
-
- // The maximum number of documents to return.
- int32 page_size = 3;
-
- // The `next_page_token` value returned from a previous List request, if any.
- string page_token = 4;
-
- // The order to sort results by. For example: `priority desc, name`.
- string order_by = 6;
-
- // The fields to return. If not set, returns all fields.
- //
- // If a document has a field that is not present in this mask, that field
- // will not be returned in the response.
- DocumentMask mask = 7;
-
- // The consistency mode for this transaction.
- // If not set, defaults to strong consistency.
- oneof consistency_selector {
- // Reads documents in a transaction.
- bytes transaction = 8;
-
- // Reads documents as they were at the given time.
- // This may not be older than 60 seconds.
- google.protobuf.Timestamp read_time = 10;
- }
-
- // If the list should show missing documents. A missing document is a
- // document that does not exist but has sub-documents. These documents will
- // be returned with a key but will not have fields, [Document.create_time][google.firestore.v1beta1.Document.create_time],
- // or [Document.update_time][google.firestore.v1beta1.Document.update_time] set.
- //
- // Requests with `show_missing` may not specify `where` or
- // `order_by`.
- bool show_missing = 12;
-}
-
-// The response for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments].
-message ListDocumentsResponse {
- // The Documents found.
- repeated Document documents = 1;
-
- // The next page token.
- string next_page_token = 2;
-}
-
-// The request for [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument].
-message CreateDocumentRequest {
- // Required. The parent resource. For example:
- // `projects/{project_id}/databases/{database_id}/documents` or
- // `projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}`
- string parent = 1 [(google.api.field_behavior) = REQUIRED];
-
- // Required. The collection ID, relative to `parent`, to list. For example: `chatrooms`.
- string collection_id = 2 [(google.api.field_behavior) = REQUIRED];
-
- // The client-assigned document ID to use for this document.
- //
- // Optional. If not specified, an ID will be assigned by the service.
- string document_id = 3;
-
- // Required. The document to create. `name` must not be set.
- Document document = 4 [(google.api.field_behavior) = REQUIRED];
-
- // The fields to return. If not set, returns all fields.
- //
- // If the document has a field that is not present in this mask, that field
- // will not be returned in the response.
- DocumentMask mask = 5;
-}
-
-// The request for [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument].
-message UpdateDocumentRequest {
- // Required. The updated document.
- // Creates the document if it does not already exist.
- Document document = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The fields to update.
- // None of the field paths in the mask may contain a reserved name.
- //
- // If the document exists on the server and has fields not referenced in the
- // mask, they are left unchanged.
- // Fields referenced in the mask, but not present in the input document, are
- // deleted from the document on the server.
- DocumentMask update_mask = 2;
-
- // The fields to return. If not set, returns all fields.
- //
- // If the document has a field that is not present in this mask, that field
- // will not be returned in the response.
- DocumentMask mask = 3;
-
- // An optional precondition on the document.
- // The request will fail if this is set and not met by the target document.
- Precondition current_document = 4;
-}
-
-// The request for [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument].
-message DeleteDocumentRequest {
- // Required. The resource name of the Document to delete. In the format:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- string name = 1 [(google.api.field_behavior) = REQUIRED];
-
- // An optional precondition on the document.
- // The request will fail if this is set and not met by the target document.
- Precondition current_document = 2;
-}
-
-// The request for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments].
-message BatchGetDocumentsRequest {
- // Required. The database name. In the format:
- // `projects/{project_id}/databases/{database_id}`.
- string database = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The names of the documents to retrieve. In the format:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- // The request will fail if any of the document is not a child resource of the
- // given `database`. Duplicate names will be elided.
- repeated string documents = 2;
-
- // The fields to return. If not set, returns all fields.
- //
- // If a document has a field that is not present in this mask, that field will
- // not be returned in the response.
- DocumentMask mask = 3;
-
- // The consistency mode for this transaction.
- // If not set, defaults to strong consistency.
- oneof consistency_selector {
- // Reads documents in a transaction.
- bytes transaction = 4;
-
- // Starts a new transaction and reads the documents.
- // Defaults to a read-only transaction.
- // The new transaction ID will be returned as the first response in the
- // stream.
- TransactionOptions new_transaction = 5;
-
- // Reads documents as they were at the given time.
- // This may not be older than 60 seconds.
- google.protobuf.Timestamp read_time = 7;
- }
-}
-
-// The streamed response for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments].
-message BatchGetDocumentsResponse {
- // A single result.
- // This can be empty if the server is just returning a transaction.
- oneof result {
- // A document that was requested.
- Document found = 1;
-
- // A document name that was requested but does not exist. In the format:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- string missing = 2;
- }
-
- // The transaction that was started as part of this request.
- // Will only be set in the first response, and only if
- // [BatchGetDocumentsRequest.new_transaction][google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction] was set in the request.
- bytes transaction = 3;
-
- // The time at which the document was read.
- // This may be monotically increasing, in this case the previous documents in
- // the result stream are guaranteed not to have changed between their
- // read_time and this one.
- google.protobuf.Timestamp read_time = 4;
-}
-
-// The request for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction].
-message BeginTransactionRequest {
- // Required. The database name. In the format:
- // `projects/{project_id}/databases/{database_id}`.
- string database = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The options for the transaction.
- // Defaults to a read-write transaction.
- TransactionOptions options = 2;
-}
-
-// The response for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction].
-message BeginTransactionResponse {
- // The transaction that was started.
- bytes transaction = 1;
-}
-
-// The request for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit].
-message CommitRequest {
- // Required. The database name. In the format:
- // `projects/{project_id}/databases/{database_id}`.
- string database = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The writes to apply.
- //
- // Always executed atomically and in order.
- repeated Write writes = 2;
-
- // If set, applies all writes in this transaction, and commits it.
- bytes transaction = 3;
-}
-
-// The response for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit].
-message CommitResponse {
- // The result of applying the writes.
- //
- // This i-th write result corresponds to the i-th write in the
- // request.
- repeated WriteResult write_results = 1;
-
- // The time at which the commit occurred.
- google.protobuf.Timestamp commit_time = 2;
-}
-
-// The request for [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback].
-message RollbackRequest {
- // Required. The database name. In the format:
- // `projects/{project_id}/databases/{database_id}`.
- string database = 1 [(google.api.field_behavior) = REQUIRED];
-
- // Required. The transaction to roll back.
- bytes transaction = 2 [(google.api.field_behavior) = REQUIRED];
-}
-
-// The request for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery].
-message RunQueryRequest {
- // Required. The parent resource name. In the format:
- // `projects/{project_id}/databases/{database_id}/documents` or
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- // For example:
- // `projects/my-project/databases/my-database/documents` or
- // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
- string parent = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The query to run.
- oneof query_type {
- // A structured query.
- StructuredQuery structured_query = 2;
- }
-
- // The consistency mode for this transaction.
- // If not set, defaults to strong consistency.
- oneof consistency_selector {
- // Reads documents in a transaction.
- bytes transaction = 5;
-
- // Starts a new transaction and reads the documents.
- // Defaults to a read-only transaction.
- // The new transaction ID will be returned as the first response in the
- // stream.
- TransactionOptions new_transaction = 6;
-
- // Reads documents as they were at the given time.
- // This may not be older than 60 seconds.
- google.protobuf.Timestamp read_time = 7;
- }
-}
-
-// The response for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery].
-message RunQueryResponse {
- // The transaction that was started as part of this request.
- // Can only be set in the first response, and only if
- // [RunQueryRequest.new_transaction][google.firestore.v1beta1.RunQueryRequest.new_transaction] was set in the request.
- // If set, no other fields will be set in this response.
- bytes transaction = 2;
-
- // A query result.
- // Not set when reporting partial progress.
- Document document = 1;
-
- // The time at which the document was read. This may be monotonically
- // increasing; in this case, the previous documents in the result stream are
- // guaranteed not to have changed between their `read_time` and this one.
- //
- // If the query returns no results, a response with `read_time` and no
- // `document` will be sent, and this represents the time at which the query
- // was run.
- google.protobuf.Timestamp read_time = 3;
-
- // The number of results that have been skipped due to an offset between
- // the last response and the current response.
- int32 skipped_results = 4;
-}
-
-// The request for [Firestore.Write][google.firestore.v1beta1.Firestore.Write].
-//
-// The first request creates a stream, or resumes an existing one from a token.
-//
-// When creating a new stream, the server replies with a response containing
-// only an ID and a token, to use in the next request.
-//
-// When resuming a stream, the server first streams any responses later than the
-// given token, then a response containing only an up-to-date token, to use in
-// the next request.
-message WriteRequest {
- // Required. The database name. In the format:
- // `projects/{project_id}/databases/{database_id}`.
- // This is only required in the first message.
- string database = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The ID of the write stream to resume.
- // This may only be set in the first message. When left empty, a new write
- // stream will be created.
- string stream_id = 2;
-
- // The writes to apply.
- //
- // Always executed atomically and in order.
- // This must be empty on the first request.
- // This may be empty on the last request.
- // This must not be empty on all other requests.
- repeated Write writes = 3;
-
- // A stream token that was previously sent by the server.
- //
- // The client should set this field to the token from the most recent
- // [WriteResponse][google.firestore.v1beta1.WriteResponse] it has received. This acknowledges that the client has
- // received responses up to this token. After sending this token, earlier
- // tokens may not be used anymore.
- //
- // The server may close the stream if there are too many unacknowledged
- // responses.
- //
- // Leave this field unset when creating a new stream. To resume a stream at
- // a specific point, set this field and the `stream_id` field.
- //
- // Leave this field unset when creating a new stream.
- bytes stream_token = 4;
-
- // Labels associated with this write request.
- map labels = 5;
-}
-
-// The response for [Firestore.Write][google.firestore.v1beta1.Firestore.Write].
-message WriteResponse {
- // The ID of the stream.
- // Only set on the first message, when a new stream was created.
- string stream_id = 1;
-
- // A token that represents the position of this response in the stream.
- // This can be used by a client to resume the stream at this point.
- //
- // This field is always set.
- bytes stream_token = 2;
-
- // The result of applying the writes.
- //
- // This i-th write result corresponds to the i-th write in the
- // request.
- repeated WriteResult write_results = 3;
-
- // The time at which the commit occurred.
- google.protobuf.Timestamp commit_time = 4;
-}
-
-// A request for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]
-message ListenRequest {
- // Required. The database name. In the format:
- // `projects/{project_id}/databases/{database_id}`.
- string database = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The supported target changes.
- oneof target_change {
- // A target to add to this stream.
- Target add_target = 2;
-
- // The ID of a target to remove from this stream.
- int32 remove_target = 3;
- }
-
- // Labels associated with this target change.
- map labels = 4;
-}
-
-// The response for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen].
-message ListenResponse {
- // The supported responses.
- oneof response_type {
- // Targets have changed.
- TargetChange target_change = 2;
-
- // A [Document][google.firestore.v1beta1.Document] has changed.
- DocumentChange document_change = 3;
-
- // A [Document][google.firestore.v1beta1.Document] has been deleted.
- DocumentDelete document_delete = 4;
-
- // A [Document][google.firestore.v1beta1.Document] has been removed from a target (because it is no longer
- // relevant to that target).
- DocumentRemove document_remove = 6;
-
- // A filter to apply to the set of documents previously returned for the
- // given target.
- //
- // Returned when documents may have been removed from the given target, but
- // the exact documents are unknown.
- ExistenceFilter filter = 5;
- }
-}
-
-// A specification of a set of documents to listen to.
-message Target {
- // A target specified by a set of documents names.
- message DocumentsTarget {
- // The names of the documents to retrieve. In the format:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- // The request will fail if any of the document is not a child resource of
- // the given `database`. Duplicate names will be elided.
- repeated string documents = 2;
- }
-
- // A target specified by a query.
- message QueryTarget {
- // The parent resource name. In the format:
- // `projects/{project_id}/databases/{database_id}/documents` or
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- // For example:
- // `projects/my-project/databases/my-database/documents` or
- // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
- string parent = 1;
-
- // The query to run.
- oneof query_type {
- // A structured query.
- StructuredQuery structured_query = 2;
- }
- }
-
- // The type of target to listen to.
- oneof target_type {
- // A target specified by a query.
- QueryTarget query = 2;
-
- // A target specified by a set of document names.
- DocumentsTarget documents = 3;
- }
-
- // When to start listening.
- //
- // If not specified, all matching Documents are returned before any
- // subsequent changes.
- oneof resume_type {
- // A resume token from a prior [TargetChange][google.firestore.v1beta1.TargetChange] for an identical target.
- //
- // Using a resume token with a different target is unsupported and may fail.
- bytes resume_token = 4;
-
- // Start listening after a specific `read_time`.
- //
- // The client must know the state of matching documents at this time.
- google.protobuf.Timestamp read_time = 11;
- }
-
- // The target ID that identifies the target on the stream. Must be a positive
- // number and non-zero.
- int32 target_id = 5;
-
- // If the target should be removed once it is current and consistent.
- bool once = 6;
-}
-
-// Targets being watched have changed.
-message TargetChange {
- // The type of change.
- enum TargetChangeType {
- // No change has occurred. Used only to send an updated `resume_token`.
- NO_CHANGE = 0;
-
- // The targets have been added.
- ADD = 1;
-
- // The targets have been removed.
- REMOVE = 2;
-
- // The targets reflect all changes committed before the targets were added
- // to the stream.
- //
- // This will be sent after or with a `read_time` that is greater than or
- // equal to the time at which the targets were added.
- //
- // Listeners can wait for this change if read-after-write semantics
- // are desired.
- CURRENT = 3;
-
- // The targets have been reset, and a new initial state for the targets
- // will be returned in subsequent changes.
- //
- // After the initial state is complete, `CURRENT` will be returned even
- // if the target was previously indicated to be `CURRENT`.
- RESET = 4;
- }
-
- // The type of change that occurred.
- TargetChangeType target_change_type = 1;
-
- // The target IDs of targets that have changed.
- //
- // If empty, the change applies to all targets.
- //
- // The order of the target IDs is not defined.
- repeated int32 target_ids = 2;
-
- // The error that resulted in this change, if applicable.
- google.rpc.Status cause = 3;
-
- // A token that can be used to resume the stream for the given `target_ids`,
- // or all targets if `target_ids` is empty.
- //
- // Not set on every target change.
- bytes resume_token = 4;
-
- // The consistent `read_time` for the given `target_ids` (omitted when the
- // target_ids are not at a consistent snapshot).
- //
- // The stream is guaranteed to send a `read_time` with `target_ids` empty
- // whenever the entire stream reaches a new consistent snapshot. ADD,
- // CURRENT, and RESET messages are guaranteed to (eventually) result in a
- // new consistent snapshot (while NO_CHANGE and REMOVE messages are not).
- //
- // For a given stream, `read_time` is guaranteed to be monotonically
- // increasing.
- google.protobuf.Timestamp read_time = 6;
-}
-
-// The request for [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds].
-message ListCollectionIdsRequest {
- // Required. The parent document. In the format:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- // For example:
- // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
- string parent = 1 [(google.api.field_behavior) = REQUIRED];
-
- // The maximum number of results to return.
- int32 page_size = 2;
-
- // A page token. Must be a value from
- // [ListCollectionIdsResponse][google.firestore.v1beta1.ListCollectionIdsResponse].
- string page_token = 3;
-}
-
-// The response from [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds].
-message ListCollectionIdsResponse {
- // The collection ids.
- repeated string collection_ids = 1;
-
- // A page token that may be used to continue the list.
- string next_page_token = 2;
-}
diff --git a/google/cloud/firestore_v1beta1/proto/firestore_admin.proto b/google/cloud/firestore_v1beta1/proto/firestore_admin.proto
deleted file mode 100644
index 15ce94da6b..0000000000
--- a/google/cloud/firestore_v1beta1/proto/firestore_admin.proto
+++ /dev/null
@@ -1,365 +0,0 @@
-// Copyright 2018 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.admin.v1beta1;
-
-import "google/api/annotations.proto";
-import "google/firestore/admin/v1beta1/index.proto";
-import "google/longrunning/operations.proto";
-import "google/protobuf/empty.proto";
-import "google/protobuf/timestamp.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.Admin.V1Beta1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin";
-option java_multiple_files = true;
-option java_outer_classname = "FirestoreAdminProto";
-option java_package = "com.google.firestore.admin.v1beta1";
-option objc_class_prefix = "GCFS";
-
-
-// The Cloud Firestore Admin API.
-//
-// This API provides several administrative services for Cloud Firestore.
-//
-// # Concepts
-//
-// Project, Database, Namespace, Collection, and Document are used as defined in
-// the Google Cloud Firestore API.
-//
-// Operation: An Operation represents work being performed in the background.
-//
-//
-// # Services
-//
-// ## Index
-//
-// The index service manages Cloud Firestore indexes.
-//
-// Index creation is performed asynchronously.
-// An Operation resource is created for each such asynchronous operation.
-// The state of the operation (including any errors encountered)
-// may be queried via the Operation resource.
-//
-// ## Metadata
-//
-// Provides metadata and statistical information about data in Cloud Firestore.
-// The data provided as part of this API may be stale.
-//
-// ## Operation
-//
-// The Operations collection provides a record of actions performed for the
-// specified Project (including any Operations in progress). Operations are not
-// created directly but through calls on other collections or resources.
-//
-// An Operation that is not yet done may be cancelled. The request to cancel is
-// asynchronous and the Operation may continue to run for some time after the
-// request to cancel is made.
-//
-// An Operation that is done may be deleted so that it is no longer listed as
-// part of the Operation collection.
-//
-// Operations are created by service `FirestoreAdmin`, but are accessed via
-// service `google.longrunning.Operations`.
-service FirestoreAdmin {
- // Creates the specified index.
- // A newly created index's initial state is `CREATING`. On completion of the
- // returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`.
- // If the index already exists, the call will return an `ALREADY_EXISTS`
- // status.
- //
- // During creation, the process could result in an error, in which case the
- // index will move to the `ERROR` state. The process can be recovered by
- // fixing the data that caused the error, removing the index with
- // [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with
- // [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
- //
- // Indexes with a single field cannot be created.
- rpc CreateIndex(CreateIndexRequest) returns (google.longrunning.Operation) {
- option (google.api.http) = {
- post: "/v1beta1/{parent=projects/*/databases/*}/indexes"
- body: "index"
- };
- }
-
- // Lists the indexes that match the specified filters.
- rpc ListIndexes(ListIndexesRequest) returns (ListIndexesResponse) {
- option (google.api.http) = {
- get: "/v1beta1/{parent=projects/*/databases/*}/indexes"
- };
- }
-
- // Gets an index.
- rpc GetIndex(GetIndexRequest) returns (Index) {
- option (google.api.http) = {
- get: "/v1beta1/{name=projects/*/databases/*/indexes/*}"
- };
- }
-
- // Deletes an index.
- rpc DeleteIndex(DeleteIndexRequest) returns (google.protobuf.Empty) {
- option (google.api.http) = {
- delete: "/v1beta1/{name=projects/*/databases/*/indexes/*}"
- };
- }
-
- // Exports a copy of all or a subset of documents from Google Cloud Firestore
- // to another storage system, such as Google Cloud Storage. Recent updates to
- // documents may not be reflected in the export. The export occurs in the
- // background and its progress can be monitored and managed via the
- // Operation resource that is created. The output of an export may only be
- // used once the associated operation is done. If an export operation is
- // cancelled before completion it may leave partial data behind in Google
- // Cloud Storage.
- rpc ExportDocuments(ExportDocumentsRequest) returns (google.longrunning.Operation) {
- option (google.api.http) = {
- post: "/v1beta1/{name=projects/*/databases/*}:exportDocuments"
- body: "*"
- };
- }
-
- // Imports documents into Google Cloud Firestore. Existing documents with the
- // same name are overwritten. The import occurs in the background and its
- // progress can be monitored and managed via the Operation resource that is
- // created. If an ImportDocuments operation is cancelled, it is possible
- // that a subset of the data has already been imported to Cloud Firestore.
- rpc ImportDocuments(ImportDocumentsRequest) returns (google.longrunning.Operation) {
- option (google.api.http) = {
- post: "/v1beta1/{name=projects/*/databases/*}:importDocuments"
- body: "*"
- };
- }
-}
-
-// Metadata for index operations. This metadata populates
-// the metadata field of [google.longrunning.Operation][google.longrunning.Operation].
-message IndexOperationMetadata {
- // The type of index operation.
- enum OperationType {
- // Unspecified. Never set by server.
- OPERATION_TYPE_UNSPECIFIED = 0;
-
- // The operation is creating the index. Initiated by a `CreateIndex` call.
- CREATING_INDEX = 1;
- }
-
- // The time that work began on the operation.
- google.protobuf.Timestamp start_time = 1;
-
- // The time the operation ended, either successfully or otherwise. Unset if
- // the operation is still active.
- google.protobuf.Timestamp end_time = 2;
-
- // The index resource that this operation is acting on. For example:
- // `projects/{project_id}/databases/{database_id}/indexes/{index_id}`
- string index = 3;
-
- // The type of index operation.
- OperationType operation_type = 4;
-
- // True if the [google.longrunning.Operation] was cancelled. If the
- // cancellation is in progress, cancelled will be true but
- // [google.longrunning.Operation.done][google.longrunning.Operation.done] will be false.
- bool cancelled = 5;
-
- // Progress of the existing operation, measured in number of documents.
- Progress document_progress = 6;
-}
-
-// Measures the progress of a particular metric.
-message Progress {
- // An estimate of how much work has been completed. Note that this may be
- // greater than `work_estimated`.
- int64 work_completed = 1;
-
- // An estimate of how much work needs to be performed. Zero if the
- // work estimate is unavailable. May change as work progresses.
- int64 work_estimated = 2;
-}
-
-// The request for [FirestoreAdmin.CreateIndex][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex].
-message CreateIndexRequest {
- // The name of the database this index will apply to. For example:
- // `projects/{project_id}/databases/{database_id}`
- string parent = 1;
-
- // The index to create. The name and state fields are output only and will be
- // ignored. Certain single field indexes cannot be created or deleted.
- Index index = 2;
-}
-
-// The request for [FirestoreAdmin.GetIndex][google.firestore.admin.v1beta1.FirestoreAdmin.GetIndex].
-message GetIndexRequest {
- // The name of the index. For example:
- // `projects/{project_id}/databases/{database_id}/indexes/{index_id}`
- string name = 1;
-}
-
-// The request for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes].
-message ListIndexesRequest {
- // The database name. For example:
- // `projects/{project_id}/databases/{database_id}`
- string parent = 1;
-
- string filter = 2;
-
- // The standard List page size.
- int32 page_size = 3;
-
- // The standard List page token.
- string page_token = 4;
-}
-
-// The request for [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex].
-message DeleteIndexRequest {
- // The index name. For example:
- // `projects/{project_id}/databases/{database_id}/indexes/{index_id}`
- string name = 1;
-}
-
-// The response for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes].
-message ListIndexesResponse {
- // The indexes.
- repeated Index indexes = 1;
-
- // The standard List next-page token.
- string next_page_token = 2;
-}
-
-// The request for [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1beta1.FirestoreAdmin.ExportDocuments].
-message ExportDocumentsRequest {
- // Database to export. Should be of the form:
- // `projects/{project_id}/databases/{database_id}`.
- string name = 1;
-
- // Which collection ids to export. Unspecified means all collections.
- repeated string collection_ids = 3;
-
- // The output URI. Currently only supports Google Cloud Storage URIs of the
- // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the name
- // of the Google Cloud Storage bucket and `NAMESPACE_PATH` is an optional
- // Google Cloud Storage namespace path. When
- // choosing a name, be sure to consider Google Cloud Storage naming
- // guidelines: https://cloud.google.com/storage/docs/naming.
- // If the URI is a bucket (without a namespace path), a prefix will be
- // generated based on the start time.
- string output_uri_prefix = 4;
-}
-
-// The request for [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1beta1.FirestoreAdmin.ImportDocuments].
-message ImportDocumentsRequest {
- // Database to import into. Should be of the form:
- // `projects/{project_id}/databases/{database_id}`.
- string name = 1;
-
- // Which collection ids to import. Unspecified means all collections included
- // in the import.
- repeated string collection_ids = 3;
-
- // Location of the exported files.
- // This must match the output_uri_prefix of an ExportDocumentsResponse from
- // an export that has completed successfully.
- // See:
- // [google.firestore.admin.v1beta1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1beta1.ExportDocumentsResponse.output_uri_prefix].
- string input_uri_prefix = 4;
-}
-
-// Returned in the [google.longrunning.Operation][google.longrunning.Operation] response field.
-message ExportDocumentsResponse {
- // Location of the output files. This can be used to begin an import
- // into Cloud Firestore (this project or another project) after the operation
- // completes successfully.
- string output_uri_prefix = 1;
-}
-
-// Metadata for ExportDocuments operations.
-message ExportDocumentsMetadata {
- // The time that work began on the operation.
- google.protobuf.Timestamp start_time = 1;
-
- // The time the operation ended, either successfully or otherwise. Unset if
- // the operation is still active.
- google.protobuf.Timestamp end_time = 2;
-
- // The state of the export operation.
- OperationState operation_state = 3;
-
- // An estimate of the number of documents processed.
- Progress progress_documents = 4;
-
- // An estimate of the number of bytes processed.
- Progress progress_bytes = 5;
-
- // Which collection ids are being exported.
- repeated string collection_ids = 6;
-
- // Where the entities are being exported to.
- string output_uri_prefix = 7;
-}
-
-// Metadata for ImportDocuments operations.
-message ImportDocumentsMetadata {
- // The time that work began on the operation.
- google.protobuf.Timestamp start_time = 1;
-
- // The time the operation ended, either successfully or otherwise. Unset if
- // the operation is still active.
- google.protobuf.Timestamp end_time = 2;
-
- // The state of the import operation.
- OperationState operation_state = 3;
-
- // An estimate of the number of documents processed.
- Progress progress_documents = 4;
-
- // An estimate of the number of bytes processed.
- Progress progress_bytes = 5;
-
- // Which collection ids are being imported.
- repeated string collection_ids = 6;
-
- // The location of the documents being imported.
- string input_uri_prefix = 7;
-}
-
-// The various possible states for an ongoing Operation.
-enum OperationState {
- // Unspecified.
- STATE_UNSPECIFIED = 0;
-
- // Request is being prepared for processing.
- INITIALIZING = 1;
-
- // Request is actively being processed.
- PROCESSING = 2;
-
- // Request is in the process of being cancelled after user called
- // google.longrunning.Operations.CancelOperation on the operation.
- CANCELLING = 3;
-
- // Request has been processed and is in its finalization stage.
- FINALIZING = 4;
-
- // Request has completed successfully.
- SUCCESSFUL = 5;
-
- // Request has finished being processed, but encountered an error.
- FAILED = 6;
-
- // Request has finished being cancelled after user called
- // google.longrunning.Operations.CancelOperation.
- CANCELLED = 7;
-}
diff --git a/google/cloud/firestore_v1beta1/proto/firestore_pb2.py b/google/cloud/firestore_v1beta1/proto/firestore_pb2.py
deleted file mode 100644
index 7d29eb882c..0000000000
--- a/google/cloud/firestore_v1beta1/proto/firestore_pb2.py
+++ /dev/null
@@ -1,3803 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1beta1/proto/firestore.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-from google.api import client_pb2 as google_dot_api_dot_client__pb2
-from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
-from google.cloud.firestore_v1beta1.proto import (
- common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2,
-)
-from google.cloud.firestore_v1beta1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
-)
-from google.cloud.firestore_v1beta1.proto import (
- query_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2,
-)
-from google.cloud.firestore_v1beta1.proto import (
- write_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2,
-)
-from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1beta1/proto/firestore.proto",
- package="google.firestore.v1beta1",
- syntax="proto3",
- serialized_options=_b(
- "\n\034com.google.firestore.v1beta1B\016FirestoreProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1"
- ),
- serialized_pb=_b(
- '\n4google/cloud/firestore_v1beta1/proto/firestore.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x30google/cloud/firestore_v1beta1/proto/write.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xbd\x01\n\x12GetDocumentRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x04mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x03 \x01(\x0cH\x00\x12/\n\tread_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector"\xac\x02\n\x14ListDocumentsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1a\n\rcollection_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x10\n\x08order_by\x18\x06 \x01(\t\x12\x34\n\x04mask\x18\x07 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x08 \x01(\x0cH\x00\x12/\n\tread_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x14\n\x0cshow_missing\x18\x0c \x01(\x08\x42\x16\n\x14\x63onsistency_selector"g\n\x15ListDocumentsResponse\x12\x35\n\tdocuments\x18\x01 \x03(\x0b\x32".google.firestore.v1beta1.Document\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xce\x01\n\x15\x43reateDocumentRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1a\n\rcollection_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x0b\x64ocument_id\x18\x03 \x01(\t\x12\x39\n\x08\x64ocument\x18\x04 \x01(\x0b\x32".google.firestore.v1beta1.DocumentB\x03\xe0\x41\x02\x12\x34\n\x04mask\x18\x05 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask"\x87\x02\n\x15UpdateDocumentRequest\x12\x39\n\x08\x64ocument\x18\x01 
\x01(\x0b\x32".google.firestore.v1beta1.DocumentB\x03\xe0\x41\x02\x12;\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition"l\n\x15\x44\x65leteDocumentRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12@\n\x10\x63urrent_document\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition"\xa3\x02\n\x18\x42\x61tchGetDocumentsRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tdocuments\x18\x02 \x03(\t\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x04 \x01(\x0cH\x00\x12G\n\x0fnew_transaction\x18\x05 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x00\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector"\xb1\x01\n\x19\x42\x61tchGetDocumentsResponse\x12\x33\n\x05\x66ound\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentH\x00\x12\x11\n\x07missing\x18\x02 \x01(\tH\x00\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\n\x06result"o\n\x17\x42\x65ginTransactionRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12=\n\x07options\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptions"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c"l\n\rCommitRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12/\n\x06writes\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c"\x7f\n\x0e\x43ommitResponse\x12<\n\rwrite_results\x18\x01 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"B\n\x0fRollbackRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 
\x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0btransaction\x18\x02 \x01(\x0c\x42\x03\xe0\x41\x02"\xa4\x02\n\x0fRunQueryRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x12\x15\n\x0btransaction\x18\x05 \x01(\x0cH\x01\x12G\n\x0fnew_transaction\x18\x06 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x01\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x42\x0c\n\nquery_typeB\x16\n\x14\x63onsistency_selector"\xa5\x01\n\x10RunQueryResponse\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x17\n\x0fskipped_results\x18\x04 \x01(\x05"\xf2\x01\n\x0cWriteRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tstream_id\x18\x02 \x01(\t\x12/\n\x06writes\x18\x03 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x14\n\x0cstream_token\x18\x04 \x01(\x0c\x12\x42\n\x06labels\x18\x05 \x03(\x0b\x32\x32.google.firestore.v1beta1.WriteRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa7\x01\n\rWriteResponse\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x14\n\x0cstream_token\x18\x02 \x01(\x0c\x12<\n\rwrite_results\x18\x03 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xfc\x01\n\rListenRequest\x12\x15\n\x08\x64\x61tabase\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x36\n\nadd_target\x18\x02 \x01(\x0b\x32 .google.firestore.v1beta1.TargetH\x00\x12\x17\n\rremove_target\x18\x03 \x01(\x05H\x00\x12\x43\n\x06labels\x18\x04 \x03(\x0b\x32\x33.google.firestore.v1beta1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change"\xee\x02\n\x0eListenResponse\x12?\n\rtarget_change\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.TargetChangeH\x00\x12\x43\n\x0f\x64ocument_change\x18\x03 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentChangeH\x00\x12\x43\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentDeleteH\x00\x12\x43\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentRemoveH\x00\x12;\n\x06\x66ilter\x18\x05 \x01(\x0b\x32).google.firestore.v1beta1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type"\xb0\x03\n\x06Target\x12=\n\x05query\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.Target.QueryTargetH\x00\x12\x45\n\tdocuments\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1ar\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type"\xaf\x02\n\x0cTargetChange\x12S\n\x12target_change_type\x18\x01 \x01(\x0e\x32\x37.google.firestore.v1beta1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04"V\n\x18ListCollectionIdsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 
\x01(\t"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x9b\x15\n\tFirestore\x12\x9e\x01\n\x0bGetDocument\x12,.google.firestore.v1beta1.GetDocumentRequest\x1a".google.firestore.v1beta1.Document"=\x82\xd3\xe4\x93\x02\x37\x12\x35/v1beta1/{name=projects/*/databases/*/documents/*/**}\x12\xc1\x01\n\rListDocuments\x12..google.firestore.v1beta1.ListDocumentsRequest\x1a/.google.firestore.v1beta1.ListDocumentsResponse"O\x82\xd3\xe4\x93\x02I\x12G/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xbe\x01\n\x0e\x43reateDocument\x12/.google.firestore.v1beta1.CreateDocumentRequest\x1a".google.firestore.v1beta1.Document"W\x82\xd3\xe4\x93\x02Q"E/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\x08\x64ocument\x12\xce\x01\n\x0eUpdateDocument\x12/.google.firestore.v1beta1.UpdateDocumentRequest\x1a".google.firestore.v1beta1.Document"g\x82\xd3\xe4\x93\x02J2>/v1beta1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\xda\x41\x14\x64ocument,update_mask\x12\x9f\x01\n\x0e\x44\x65leteDocument\x12/.google.firestore.v1beta1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02\x37*5/v1beta1/{name=projects/*/databases/*/documents/*/**}\xda\x41\x04name\x12\xc8\x01\n\x11\x42\x61tchGetDocuments\x12\x32.google.firestore.v1beta1.BatchGetDocumentsRequest\x1a\x33.google.firestore.v1beta1.BatchGetDocumentsResponse"H\x82\xd3\xe4\x93\x02\x42"=/v1beta1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xd6\x01\n\x10\x42\x65ginTransaction\x12\x31.google.firestore.v1beta1.BeginTransactionRequest\x1a\x32.google.firestore.v1beta1.BeginTransactionResponse"[\x82\xd3\xe4\x93\x02J"E/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\xda\x41\x08\x64\x61tabase\x12\xb5\x01\n\x06\x43ommit\x12\'.google.firestore.v1beta1.CommitRequest\x1a(.google.firestore.v1beta1.CommitResponse"X\x82\xd3\xe4\x93\x02@";/v1beta1/{databa
se=projects/*/databases/*}/documents:commit:\x01*\xda\x41\x0f\x64\x61tabase,writes\x12\xae\x01\n\x08Rollback\x12).google.firestore.v1beta1.RollbackRequest\x1a\x16.google.protobuf.Empty"_\x82\xd3\xe4\x93\x02\x42"=/v1beta1/{database=projects/*/databases/*}/documents:rollback:\x01*\xda\x41\x14\x64\x61tabase,transaction\x12\xf4\x01\n\x08RunQuery\x12).google.firestore.v1beta1.RunQueryRequest\x1a*.google.firestore.v1beta1.RunQueryResponse"\x8e\x01\x82\xd3\xe4\x93\x02\x87\x01";/v1beta1/{parent=projects/*/databases/*/documents}:runQuery:\x01*ZE"@/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\xa3\x01\n\x05Write\x12&.google.firestore.v1beta1.WriteRequest\x1a\'.google.firestore.v1beta1.WriteResponse"E\x82\xd3\xe4\x93\x02?":/v1beta1/{database=projects/*/databases/*}/documents:write:\x01*(\x01\x30\x01\x12\xa7\x01\n\x06Listen\x12\'.google.firestore.v1beta1.ListenRequest\x1a(.google.firestore.v1beta1.ListenResponse"F\x82\xd3\xe4\x93\x02@";/v1beta1/{database=projects/*/databases/*}/documents:listen:\x01*(\x01\x30\x01\x12\xa8\x02\n\x11ListCollectionIds\x12\x32.google.firestore.v1beta1.ListCollectionIdsRequest\x1a\x33.google.firestore.v1beta1.ListCollectionIdsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\x99\x01"D/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds:\x01*ZN"I/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\x01*\xda\x41\x06parent\x1av\xca\x41\x18\x66irestore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\xbc\x01\n\x1c\x63om.google.firestore.v1beta1B\x0e\x46irestoreProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3'
- ),
- dependencies=[
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- google_dot_api_dot_client__pb2.DESCRIPTOR,
- google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- google_dot_rpc_dot_status__pb2.DESCRIPTOR,
- ],
-)
-
-
-_TARGETCHANGE_TARGETCHANGETYPE = _descriptor.EnumDescriptor(
- name="TargetChangeType",
- full_name="google.firestore.v1beta1.TargetChange.TargetChangeType",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="NO_CHANGE", index=0, number=0, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="ADD", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="REMOVE", index=2, number=2, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="CURRENT", index=3, number=3, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="RESET", index=4, number=4, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=4752,
- serialized_end=4830,
-)
-_sym_db.RegisterEnumDescriptor(_TARGETCHANGE_TARGETCHANGETYPE)
-
-
-_GETDOCUMENTREQUEST = _descriptor.Descriptor(
- name="GetDocumentRequest",
- full_name="google.firestore.v1beta1.GetDocumentRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.v1beta1.GetDocumentRequest.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="mask",
- full_name="google.firestore.v1beta1.GetDocumentRequest.mask",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="transaction",
- full_name="google.firestore.v1beta1.GetDocumentRequest.transaction",
- index=2,
- number=3,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.GetDocumentRequest.read_time",
- index=3,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="consistency_selector",
- full_name="google.firestore.v1beta1.GetDocumentRequest.consistency_selector",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=462,
- serialized_end=651,
-)
-
-
-_LISTDOCUMENTSREQUEST = _descriptor.Descriptor(
- name="ListDocumentsRequest",
- full_name="google.firestore.v1beta1.ListDocumentsRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="parent",
- full_name="google.firestore.v1beta1.ListDocumentsRequest.parent",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="collection_id",
- full_name="google.firestore.v1beta1.ListDocumentsRequest.collection_id",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="page_size",
- full_name="google.firestore.v1beta1.ListDocumentsRequest.page_size",
- index=2,
- number=3,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="page_token",
- full_name="google.firestore.v1beta1.ListDocumentsRequest.page_token",
- index=3,
- number=4,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="order_by",
- full_name="google.firestore.v1beta1.ListDocumentsRequest.order_by",
- index=4,
- number=6,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="mask",
- full_name="google.firestore.v1beta1.ListDocumentsRequest.mask",
- index=5,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="transaction",
- full_name="google.firestore.v1beta1.ListDocumentsRequest.transaction",
- index=6,
- number=8,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.ListDocumentsRequest.read_time",
- index=7,
- number=10,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="show_missing",
- full_name="google.firestore.v1beta1.ListDocumentsRequest.show_missing",
- index=8,
- number=12,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="consistency_selector",
- full_name="google.firestore.v1beta1.ListDocumentsRequest.consistency_selector",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=654,
- serialized_end=954,
-)
-
-
-_LISTDOCUMENTSRESPONSE = _descriptor.Descriptor(
- name="ListDocumentsResponse",
- full_name="google.firestore.v1beta1.ListDocumentsResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="documents",
- full_name="google.firestore.v1beta1.ListDocumentsResponse.documents",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="next_page_token",
- full_name="google.firestore.v1beta1.ListDocumentsResponse.next_page_token",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=956,
- serialized_end=1059,
-)
-
-
-_CREATEDOCUMENTREQUEST = _descriptor.Descriptor(
- name="CreateDocumentRequest",
- full_name="google.firestore.v1beta1.CreateDocumentRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="parent",
- full_name="google.firestore.v1beta1.CreateDocumentRequest.parent",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="collection_id",
- full_name="google.firestore.v1beta1.CreateDocumentRequest.collection_id",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="document_id",
- full_name="google.firestore.v1beta1.CreateDocumentRequest.document_id",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="document",
- full_name="google.firestore.v1beta1.CreateDocumentRequest.document",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="mask",
- full_name="google.firestore.v1beta1.CreateDocumentRequest.mask",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1062,
- serialized_end=1268,
-)
-
-
-_UPDATEDOCUMENTREQUEST = _descriptor.Descriptor(
- name="UpdateDocumentRequest",
- full_name="google.firestore.v1beta1.UpdateDocumentRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="document",
- full_name="google.firestore.v1beta1.UpdateDocumentRequest.document",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update_mask",
- full_name="google.firestore.v1beta1.UpdateDocumentRequest.update_mask",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="mask",
- full_name="google.firestore.v1beta1.UpdateDocumentRequest.mask",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="current_document",
- full_name="google.firestore.v1beta1.UpdateDocumentRequest.current_document",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1271,
- serialized_end=1534,
-)
-
-
-_DELETEDOCUMENTREQUEST = _descriptor.Descriptor(
- name="DeleteDocumentRequest",
- full_name="google.firestore.v1beta1.DeleteDocumentRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.firestore.v1beta1.DeleteDocumentRequest.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="current_document",
- full_name="google.firestore.v1beta1.DeleteDocumentRequest.current_document",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1536,
- serialized_end=1644,
-)
-
-
-_BATCHGETDOCUMENTSREQUEST = _descriptor.Descriptor(
- name="BatchGetDocumentsRequest",
- full_name="google.firestore.v1beta1.BatchGetDocumentsRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="database",
- full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.database",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="documents",
- full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.documents",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="mask",
- full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.mask",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="transaction",
- full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.transaction",
- index=3,
- number=4,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="new_transaction",
- full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.read_time",
- index=5,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="consistency_selector",
- full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.consistency_selector",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=1647,
- serialized_end=1938,
-)
-
-
-_BATCHGETDOCUMENTSRESPONSE = _descriptor.Descriptor(
- name="BatchGetDocumentsResponse",
- full_name="google.firestore.v1beta1.BatchGetDocumentsResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="found",
- full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.found",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="missing",
- full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.missing",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="transaction",
- full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.transaction",
- index=2,
- number=3,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.read_time",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="result",
- full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.result",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=1941,
- serialized_end=2118,
-)
-
-
-_BEGINTRANSACTIONREQUEST = _descriptor.Descriptor(
- name="BeginTransactionRequest",
- full_name="google.firestore.v1beta1.BeginTransactionRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="database",
- full_name="google.firestore.v1beta1.BeginTransactionRequest.database",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="options",
- full_name="google.firestore.v1beta1.BeginTransactionRequest.options",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2120,
- serialized_end=2231,
-)
-
-
-_BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor(
- name="BeginTransactionResponse",
- full_name="google.firestore.v1beta1.BeginTransactionResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="transaction",
- full_name="google.firestore.v1beta1.BeginTransactionResponse.transaction",
- index=0,
- number=1,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2233,
- serialized_end=2280,
-)
-
-
-_COMMITREQUEST = _descriptor.Descriptor(
- name="CommitRequest",
- full_name="google.firestore.v1beta1.CommitRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="database",
- full_name="google.firestore.v1beta1.CommitRequest.database",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="writes",
- full_name="google.firestore.v1beta1.CommitRequest.writes",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="transaction",
- full_name="google.firestore.v1beta1.CommitRequest.transaction",
- index=2,
- number=3,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2282,
- serialized_end=2390,
-)
-
-
-_COMMITRESPONSE = _descriptor.Descriptor(
- name="CommitResponse",
- full_name="google.firestore.v1beta1.CommitResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="write_results",
- full_name="google.firestore.v1beta1.CommitResponse.write_results",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="commit_time",
- full_name="google.firestore.v1beta1.CommitResponse.commit_time",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2392,
- serialized_end=2519,
-)
-
-
-_ROLLBACKREQUEST = _descriptor.Descriptor(
- name="RollbackRequest",
- full_name="google.firestore.v1beta1.RollbackRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="database",
- full_name="google.firestore.v1beta1.RollbackRequest.database",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="transaction",
- full_name="google.firestore.v1beta1.RollbackRequest.transaction",
- index=1,
- number=2,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2521,
- serialized_end=2587,
-)
-
-
-_RUNQUERYREQUEST = _descriptor.Descriptor(
- name="RunQueryRequest",
- full_name="google.firestore.v1beta1.RunQueryRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="parent",
- full_name="google.firestore.v1beta1.RunQueryRequest.parent",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="structured_query",
- full_name="google.firestore.v1beta1.RunQueryRequest.structured_query",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="transaction",
- full_name="google.firestore.v1beta1.RunQueryRequest.transaction",
- index=2,
- number=5,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="new_transaction",
- full_name="google.firestore.v1beta1.RunQueryRequest.new_transaction",
- index=3,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.RunQueryRequest.read_time",
- index=4,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="query_type",
- full_name="google.firestore.v1beta1.RunQueryRequest.query_type",
- index=0,
- containing_type=None,
- fields=[],
- ),
- _descriptor.OneofDescriptor(
- name="consistency_selector",
- full_name="google.firestore.v1beta1.RunQueryRequest.consistency_selector",
- index=1,
- containing_type=None,
- fields=[],
- ),
- ],
- serialized_start=2590,
- serialized_end=2882,
-)
-
-
-_RUNQUERYRESPONSE = _descriptor.Descriptor(
- name="RunQueryResponse",
- full_name="google.firestore.v1beta1.RunQueryResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="transaction",
- full_name="google.firestore.v1beta1.RunQueryResponse.transaction",
- index=0,
- number=2,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="document",
- full_name="google.firestore.v1beta1.RunQueryResponse.document",
- index=1,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.RunQueryResponse.read_time",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="skipped_results",
- full_name="google.firestore.v1beta1.RunQueryResponse.skipped_results",
- index=3,
- number=4,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2885,
- serialized_end=3050,
-)
-
-
-_WRITEREQUEST_LABELSENTRY = _descriptor.Descriptor(
- name="LabelsEntry",
- full_name="google.firestore.v1beta1.WriteRequest.LabelsEntry",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="key",
- full_name="google.firestore.v1beta1.WriteRequest.LabelsEntry.key",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="value",
- full_name="google.firestore.v1beta1.WriteRequest.LabelsEntry.value",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=_b("8\001"),
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=3250,
- serialized_end=3295,
-)
-
-_WRITEREQUEST = _descriptor.Descriptor(
- name="WriteRequest",
- full_name="google.firestore.v1beta1.WriteRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="database",
- full_name="google.firestore.v1beta1.WriteRequest.database",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="stream_id",
- full_name="google.firestore.v1beta1.WriteRequest.stream_id",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="writes",
- full_name="google.firestore.v1beta1.WriteRequest.writes",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="stream_token",
- full_name="google.firestore.v1beta1.WriteRequest.stream_token",
- index=3,
- number=4,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="labels",
- full_name="google.firestore.v1beta1.WriteRequest.labels",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_WRITEREQUEST_LABELSENTRY],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=3053,
- serialized_end=3295,
-)
-
-
-_WRITERESPONSE = _descriptor.Descriptor(
- name="WriteResponse",
- full_name="google.firestore.v1beta1.WriteResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="stream_id",
- full_name="google.firestore.v1beta1.WriteResponse.stream_id",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="stream_token",
- full_name="google.firestore.v1beta1.WriteResponse.stream_token",
- index=1,
- number=2,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="write_results",
- full_name="google.firestore.v1beta1.WriteResponse.write_results",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="commit_time",
- full_name="google.firestore.v1beta1.WriteResponse.commit_time",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=3298,
- serialized_end=3465,
-)
-
-
-_LISTENREQUEST_LABELSENTRY = _descriptor.Descriptor(
- name="LabelsEntry",
- full_name="google.firestore.v1beta1.ListenRequest.LabelsEntry",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="key",
- full_name="google.firestore.v1beta1.ListenRequest.LabelsEntry.key",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="value",
- full_name="google.firestore.v1beta1.ListenRequest.LabelsEntry.value",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=_b("8\001"),
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=3250,
- serialized_end=3295,
-)
-
-_LISTENREQUEST = _descriptor.Descriptor(
- name="ListenRequest",
- full_name="google.firestore.v1beta1.ListenRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="database",
- full_name="google.firestore.v1beta1.ListenRequest.database",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="add_target",
- full_name="google.firestore.v1beta1.ListenRequest.add_target",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="remove_target",
- full_name="google.firestore.v1beta1.ListenRequest.remove_target",
- index=2,
- number=3,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="labels",
- full_name="google.firestore.v1beta1.ListenRequest.labels",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_LISTENREQUEST_LABELSENTRY],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="target_change",
- full_name="google.firestore.v1beta1.ListenRequest.target_change",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=3468,
- serialized_end=3720,
-)
-
-
-_LISTENRESPONSE = _descriptor.Descriptor(
- name="ListenResponse",
- full_name="google.firestore.v1beta1.ListenResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="target_change",
- full_name="google.firestore.v1beta1.ListenResponse.target_change",
- index=0,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="document_change",
- full_name="google.firestore.v1beta1.ListenResponse.document_change",
- index=1,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="document_delete",
- full_name="google.firestore.v1beta1.ListenResponse.document_delete",
- index=2,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="document_remove",
- full_name="google.firestore.v1beta1.ListenResponse.document_remove",
- index=3,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="filter",
- full_name="google.firestore.v1beta1.ListenResponse.filter",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="response_type",
- full_name="google.firestore.v1beta1.ListenResponse.response_type",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=3723,
- serialized_end=4089,
-)
-
-
-_TARGET_DOCUMENTSTARGET = _descriptor.Descriptor(
- name="DocumentsTarget",
- full_name="google.firestore.v1beta1.Target.DocumentsTarget",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="documents",
- full_name="google.firestore.v1beta1.Target.DocumentsTarget.documents",
- index=0,
- number=2,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=4342,
- serialized_end=4378,
-)
-
-_TARGET_QUERYTARGET = _descriptor.Descriptor(
- name="QueryTarget",
- full_name="google.firestore.v1beta1.Target.QueryTarget",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="parent",
- full_name="google.firestore.v1beta1.Target.QueryTarget.parent",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="structured_query",
- full_name="google.firestore.v1beta1.Target.QueryTarget.structured_query",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="query_type",
- full_name="google.firestore.v1beta1.Target.QueryTarget.query_type",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=4380,
- serialized_end=4494,
-)
-
-_TARGET = _descriptor.Descriptor(
- name="Target",
- full_name="google.firestore.v1beta1.Target",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="query",
- full_name="google.firestore.v1beta1.Target.query",
- index=0,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="documents",
- full_name="google.firestore.v1beta1.Target.documents",
- index=1,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="resume_token",
- full_name="google.firestore.v1beta1.Target.resume_token",
- index=2,
- number=4,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.Target.read_time",
- index=3,
- number=11,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="target_id",
- full_name="google.firestore.v1beta1.Target.target_id",
- index=4,
- number=5,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="once",
- full_name="google.firestore.v1beta1.Target.once",
- index=5,
- number=6,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_TARGET_DOCUMENTSTARGET, _TARGET_QUERYTARGET],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="target_type",
- full_name="google.firestore.v1beta1.Target.target_type",
- index=0,
- containing_type=None,
- fields=[],
- ),
- _descriptor.OneofDescriptor(
- name="resume_type",
- full_name="google.firestore.v1beta1.Target.resume_type",
- index=1,
- containing_type=None,
- fields=[],
- ),
- ],
- serialized_start=4092,
- serialized_end=4524,
-)
-
-
-_TARGETCHANGE = _descriptor.Descriptor(
- name="TargetChange",
- full_name="google.firestore.v1beta1.TargetChange",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="target_change_type",
- full_name="google.firestore.v1beta1.TargetChange.target_change_type",
- index=0,
- number=1,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="target_ids",
- full_name="google.firestore.v1beta1.TargetChange.target_ids",
- index=1,
- number=2,
- type=5,
- cpp_type=1,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="cause",
- full_name="google.firestore.v1beta1.TargetChange.cause",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="resume_token",
- full_name="google.firestore.v1beta1.TargetChange.resume_token",
- index=3,
- number=4,
- type=12,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b(""),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.TargetChange.read_time",
- index=4,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_TARGETCHANGE_TARGETCHANGETYPE],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=4527,
- serialized_end=4830,
-)
-
-
-_LISTCOLLECTIONIDSREQUEST = _descriptor.Descriptor(
- name="ListCollectionIdsRequest",
- full_name="google.firestore.v1beta1.ListCollectionIdsRequest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="parent",
- full_name="google.firestore.v1beta1.ListCollectionIdsRequest.parent",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="page_size",
- full_name="google.firestore.v1beta1.ListCollectionIdsRequest.page_size",
- index=1,
- number=2,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="page_token",
- full_name="google.firestore.v1beta1.ListCollectionIdsRequest.page_token",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=4832,
- serialized_end=4918,
-)
-
-
-_LISTCOLLECTIONIDSRESPONSE = _descriptor.Descriptor(
- name="ListCollectionIdsResponse",
- full_name="google.firestore.v1beta1.ListCollectionIdsResponse",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="collection_ids",
- full_name="google.firestore.v1beta1.ListCollectionIdsResponse.collection_ids",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="next_page_token",
- full_name="google.firestore.v1beta1.ListCollectionIdsResponse.next_page_token",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=4920,
- serialized_end=4996,
-)
-
-_GETDOCUMENTREQUEST.fields_by_name[
- "mask"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
-)
-_GETDOCUMENTREQUEST.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"].fields.append(
- _GETDOCUMENTREQUEST.fields_by_name["transaction"]
-)
-_GETDOCUMENTREQUEST.fields_by_name[
- "transaction"
-].containing_oneof = _GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"]
-_GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"].fields.append(
- _GETDOCUMENTREQUEST.fields_by_name["read_time"]
-)
-_GETDOCUMENTREQUEST.fields_by_name[
- "read_time"
-].containing_oneof = _GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"]
-_LISTDOCUMENTSREQUEST.fields_by_name[
- "mask"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
-)
-_LISTDOCUMENTSREQUEST.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append(
- _LISTDOCUMENTSREQUEST.fields_by_name["transaction"]
-)
-_LISTDOCUMENTSREQUEST.fields_by_name[
- "transaction"
-].containing_oneof = _LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"]
-_LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append(
- _LISTDOCUMENTSREQUEST.fields_by_name["read_time"]
-)
-_LISTDOCUMENTSREQUEST.fields_by_name[
- "read_time"
-].containing_oneof = _LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"]
-_LISTDOCUMENTSRESPONSE.fields_by_name[
- "documents"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_CREATEDOCUMENTREQUEST.fields_by_name[
- "document"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_CREATEDOCUMENTREQUEST.fields_by_name[
- "mask"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
-)
-_UPDATEDOCUMENTREQUEST.fields_by_name[
- "document"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_UPDATEDOCUMENTREQUEST.fields_by_name[
- "update_mask"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
-)
-_UPDATEDOCUMENTREQUEST.fields_by_name[
- "mask"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
-)
-_UPDATEDOCUMENTREQUEST.fields_by_name[
- "current_document"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_DELETEDOCUMENTREQUEST.fields_by_name[
- "current_document"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_BATCHGETDOCUMENTSREQUEST.fields_by_name[
- "mask"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
-)
-_BATCHGETDOCUMENTSREQUEST.fields_by_name[
- "new_transaction"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS
-)
-_BATCHGETDOCUMENTSREQUEST.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append(
- _BATCHGETDOCUMENTSREQUEST.fields_by_name["transaction"]
-)
-_BATCHGETDOCUMENTSREQUEST.fields_by_name[
- "transaction"
-].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"]
-_BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append(
- _BATCHGETDOCUMENTSREQUEST.fields_by_name["new_transaction"]
-)
-_BATCHGETDOCUMENTSREQUEST.fields_by_name[
- "new_transaction"
-].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"]
-_BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append(
- _BATCHGETDOCUMENTSREQUEST.fields_by_name["read_time"]
-)
-_BATCHGETDOCUMENTSREQUEST.fields_by_name[
- "read_time"
-].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"]
-_BATCHGETDOCUMENTSRESPONSE.fields_by_name[
- "found"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_BATCHGETDOCUMENTSRESPONSE.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"].fields.append(
- _BATCHGETDOCUMENTSRESPONSE.fields_by_name["found"]
-)
-_BATCHGETDOCUMENTSRESPONSE.fields_by_name[
- "found"
-].containing_oneof = _BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"]
-_BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"].fields.append(
- _BATCHGETDOCUMENTSRESPONSE.fields_by_name["missing"]
-)
-_BATCHGETDOCUMENTSRESPONSE.fields_by_name[
- "missing"
-].containing_oneof = _BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"]
-_BEGINTRANSACTIONREQUEST.fields_by_name[
- "options"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS
-)
-_COMMITREQUEST.fields_by_name[
- "writes"
-].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITE
-_COMMITRESPONSE.fields_by_name[
- "write_results"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITERESULT
-)
-_COMMITRESPONSE.fields_by_name[
- "commit_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_RUNQUERYREQUEST.fields_by_name[
- "structured_query"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
-)
-_RUNQUERYREQUEST.fields_by_name[
- "new_transaction"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS
-)
-_RUNQUERYREQUEST.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_RUNQUERYREQUEST.oneofs_by_name["query_type"].fields.append(
- _RUNQUERYREQUEST.fields_by_name["structured_query"]
-)
-_RUNQUERYREQUEST.fields_by_name[
- "structured_query"
-].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["query_type"]
-_RUNQUERYREQUEST.oneofs_by_name["consistency_selector"].fields.append(
- _RUNQUERYREQUEST.fields_by_name["transaction"]
-)
-_RUNQUERYREQUEST.fields_by_name[
- "transaction"
-].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["consistency_selector"]
-_RUNQUERYREQUEST.oneofs_by_name["consistency_selector"].fields.append(
- _RUNQUERYREQUEST.fields_by_name["new_transaction"]
-)
-_RUNQUERYREQUEST.fields_by_name[
- "new_transaction"
-].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["consistency_selector"]
-_RUNQUERYREQUEST.oneofs_by_name["consistency_selector"].fields.append(
- _RUNQUERYREQUEST.fields_by_name["read_time"]
-)
-_RUNQUERYREQUEST.fields_by_name[
- "read_time"
-].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["consistency_selector"]
-_RUNQUERYRESPONSE.fields_by_name[
- "document"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_RUNQUERYRESPONSE.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_WRITEREQUEST_LABELSENTRY.containing_type = _WRITEREQUEST
-_WRITEREQUEST.fields_by_name[
- "writes"
-].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITE
-_WRITEREQUEST.fields_by_name["labels"].message_type = _WRITEREQUEST_LABELSENTRY
-_WRITERESPONSE.fields_by_name[
- "write_results"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITERESULT
-)
-_WRITERESPONSE.fields_by_name[
- "commit_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_LISTENREQUEST_LABELSENTRY.containing_type = _LISTENREQUEST
-_LISTENREQUEST.fields_by_name["add_target"].message_type = _TARGET
-_LISTENREQUEST.fields_by_name["labels"].message_type = _LISTENREQUEST_LABELSENTRY
-_LISTENREQUEST.oneofs_by_name["target_change"].fields.append(
- _LISTENREQUEST.fields_by_name["add_target"]
-)
-_LISTENREQUEST.fields_by_name[
- "add_target"
-].containing_oneof = _LISTENREQUEST.oneofs_by_name["target_change"]
-_LISTENREQUEST.oneofs_by_name["target_change"].fields.append(
- _LISTENREQUEST.fields_by_name["remove_target"]
-)
-_LISTENREQUEST.fields_by_name[
- "remove_target"
-].containing_oneof = _LISTENREQUEST.oneofs_by_name["target_change"]
-_LISTENRESPONSE.fields_by_name["target_change"].message_type = _TARGETCHANGE
-_LISTENRESPONSE.fields_by_name[
- "document_change"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTCHANGE
-)
-_LISTENRESPONSE.fields_by_name[
- "document_delete"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTDELETE
-)
-_LISTENRESPONSE.fields_by_name[
- "document_remove"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTREMOVE
-)
-_LISTENRESPONSE.fields_by_name[
- "filter"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._EXISTENCEFILTER
-)
-_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append(
- _LISTENRESPONSE.fields_by_name["target_change"]
-)
-_LISTENRESPONSE.fields_by_name[
- "target_change"
-].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"]
-_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append(
- _LISTENRESPONSE.fields_by_name["document_change"]
-)
-_LISTENRESPONSE.fields_by_name[
- "document_change"
-].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"]
-_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append(
- _LISTENRESPONSE.fields_by_name["document_delete"]
-)
-_LISTENRESPONSE.fields_by_name[
- "document_delete"
-].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"]
-_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append(
- _LISTENRESPONSE.fields_by_name["document_remove"]
-)
-_LISTENRESPONSE.fields_by_name[
- "document_remove"
-].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"]
-_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append(
- _LISTENRESPONSE.fields_by_name["filter"]
-)
-_LISTENRESPONSE.fields_by_name[
- "filter"
-].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"]
-_TARGET_DOCUMENTSTARGET.containing_type = _TARGET
-_TARGET_QUERYTARGET.fields_by_name[
- "structured_query"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
-)
-_TARGET_QUERYTARGET.containing_type = _TARGET
-_TARGET_QUERYTARGET.oneofs_by_name["query_type"].fields.append(
- _TARGET_QUERYTARGET.fields_by_name["structured_query"]
-)
-_TARGET_QUERYTARGET.fields_by_name[
- "structured_query"
-].containing_oneof = _TARGET_QUERYTARGET.oneofs_by_name["query_type"]
-_TARGET.fields_by_name["query"].message_type = _TARGET_QUERYTARGET
-_TARGET.fields_by_name["documents"].message_type = _TARGET_DOCUMENTSTARGET
-_TARGET.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_TARGET.oneofs_by_name["target_type"].fields.append(_TARGET.fields_by_name["query"])
-_TARGET.fields_by_name["query"].containing_oneof = _TARGET.oneofs_by_name["target_type"]
-_TARGET.oneofs_by_name["target_type"].fields.append(_TARGET.fields_by_name["documents"])
-_TARGET.fields_by_name["documents"].containing_oneof = _TARGET.oneofs_by_name[
- "target_type"
-]
-_TARGET.oneofs_by_name["resume_type"].fields.append(
- _TARGET.fields_by_name["resume_token"]
-)
-_TARGET.fields_by_name["resume_token"].containing_oneof = _TARGET.oneofs_by_name[
- "resume_type"
-]
-_TARGET.oneofs_by_name["resume_type"].fields.append(_TARGET.fields_by_name["read_time"])
-_TARGET.fields_by_name["read_time"].containing_oneof = _TARGET.oneofs_by_name[
- "resume_type"
-]
-_TARGETCHANGE.fields_by_name[
- "target_change_type"
-].enum_type = _TARGETCHANGE_TARGETCHANGETYPE
-_TARGETCHANGE.fields_by_name[
- "cause"
-].message_type = google_dot_rpc_dot_status__pb2._STATUS
-_TARGETCHANGE.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_TARGETCHANGE_TARGETCHANGETYPE.containing_type = _TARGETCHANGE
-DESCRIPTOR.message_types_by_name["GetDocumentRequest"] = _GETDOCUMENTREQUEST
-DESCRIPTOR.message_types_by_name["ListDocumentsRequest"] = _LISTDOCUMENTSREQUEST
-DESCRIPTOR.message_types_by_name["ListDocumentsResponse"] = _LISTDOCUMENTSRESPONSE
-DESCRIPTOR.message_types_by_name["CreateDocumentRequest"] = _CREATEDOCUMENTREQUEST
-DESCRIPTOR.message_types_by_name["UpdateDocumentRequest"] = _UPDATEDOCUMENTREQUEST
-DESCRIPTOR.message_types_by_name["DeleteDocumentRequest"] = _DELETEDOCUMENTREQUEST
-DESCRIPTOR.message_types_by_name["BatchGetDocumentsRequest"] = _BATCHGETDOCUMENTSREQUEST
-DESCRIPTOR.message_types_by_name[
- "BatchGetDocumentsResponse"
-] = _BATCHGETDOCUMENTSRESPONSE
-DESCRIPTOR.message_types_by_name["BeginTransactionRequest"] = _BEGINTRANSACTIONREQUEST
-DESCRIPTOR.message_types_by_name["BeginTransactionResponse"] = _BEGINTRANSACTIONRESPONSE
-DESCRIPTOR.message_types_by_name["CommitRequest"] = _COMMITREQUEST
-DESCRIPTOR.message_types_by_name["CommitResponse"] = _COMMITRESPONSE
-DESCRIPTOR.message_types_by_name["RollbackRequest"] = _ROLLBACKREQUEST
-DESCRIPTOR.message_types_by_name["RunQueryRequest"] = _RUNQUERYREQUEST
-DESCRIPTOR.message_types_by_name["RunQueryResponse"] = _RUNQUERYRESPONSE
-DESCRIPTOR.message_types_by_name["WriteRequest"] = _WRITEREQUEST
-DESCRIPTOR.message_types_by_name["WriteResponse"] = _WRITERESPONSE
-DESCRIPTOR.message_types_by_name["ListenRequest"] = _LISTENREQUEST
-DESCRIPTOR.message_types_by_name["ListenResponse"] = _LISTENRESPONSE
-DESCRIPTOR.message_types_by_name["Target"] = _TARGET
-DESCRIPTOR.message_types_by_name["TargetChange"] = _TARGETCHANGE
-DESCRIPTOR.message_types_by_name["ListCollectionIdsRequest"] = _LISTCOLLECTIONIDSREQUEST
-DESCRIPTOR.message_types_by_name[
- "ListCollectionIdsResponse"
-] = _LISTCOLLECTIONIDSRESPONSE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-GetDocumentRequest = _reflection.GeneratedProtocolMessageType(
- "GetDocumentRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GETDOCUMENTREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument].
-
-
- Attributes:
- name:
- Required. The resource name of the Document to get. In the
- format: ``projects/{project_id}/databases/{database_id}/docume
- nts/{document_path}``.
- mask:
- The fields to return. If not set, returns all fields. If the
- document has a field that is not present in this mask, that
- field will not be returned in the response.
- consistency_selector:
- The consistency mode for this transaction. If not set,
- defaults to strong consistency.
- transaction:
- Reads the document in a transaction.
- read_time:
- Reads the version of the document at the given time. This may
- not be older than 60 seconds.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.GetDocumentRequest)
- ),
-)
-_sym_db.RegisterMessage(GetDocumentRequest)
-
-ListDocumentsRequest = _reflection.GeneratedProtocolMessageType(
- "ListDocumentsRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTDOCUMENTSREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments].
-
-
- Attributes:
- parent:
- Required. The parent resource name. In the format:
- ``projects/{project_id}/databases/{database_id}/documents`` or
- ``projects/{project_id}/databases/{database_id}/documents/{doc
- ument_path}``. For example: ``projects/my-
- project/databases/my-database/documents`` or ``projects/my-
- project/databases/my-database/documents/chatrooms/my-
- chatroom``
- collection_id:
- Required. The collection ID, relative to ``parent``, to list.
- For example: ``chatrooms`` or ``messages``.
- page_size:
- The maximum number of documents to return.
- page_token:
- The ``next_page_token`` value returned from a previous List
- request, if any.
- order_by:
- The order to sort results by. For example: ``priority desc,
- name``.
- mask:
- The fields to return. If not set, returns all fields. If a
- document has a field that is not present in this mask, that
- field will not be returned in the response.
- consistency_selector:
- The consistency mode for this transaction. If not set,
- defaults to strong consistency.
- transaction:
- Reads documents in a transaction.
- read_time:
- Reads documents as they were at the given time. This may not
- be older than 60 seconds.
- show_missing:
- If the list should show missing documents. A missing document
- is a document that does not exist but has sub-documents. These
- documents will be returned with a key but will not have
- fields, [Document.create\_time][google.firestore.v1beta1.Docum
- ent.create\_time], or [Document.update\_time][google.firestore
- .v1beta1.Document.update\_time] set. Requests with
- ``show_missing`` may not specify ``where`` or ``order_by``.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListDocumentsRequest)
- ),
-)
-_sym_db.RegisterMessage(ListDocumentsRequest)
-
-ListDocumentsResponse = _reflection.GeneratedProtocolMessageType(
- "ListDocumentsResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTDOCUMENTSRESPONSE,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The response for
- [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments].
-
-
- Attributes:
- documents:
- The Documents found.
- next_page_token:
- The next page token.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListDocumentsResponse)
- ),
-)
-_sym_db.RegisterMessage(ListDocumentsResponse)
-
-CreateDocumentRequest = _reflection.GeneratedProtocolMessageType(
- "CreateDocumentRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CREATEDOCUMENTREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument].
-
-
- Attributes:
- parent:
- Required. The parent resource. For example:
- ``projects/{project_id}/databases/{database_id}/documents`` or
- ``projects/{project_id}/databases/{database_id}/documents/chat
- rooms/{chatroom_id}``
- collection_id:
- Required. The collection ID, relative to ``parent``, to list.
- For example: ``chatrooms``.
- document_id:
- The client-assigned document ID to use for this document.
- Optional. If not specified, an ID will be assigned by the
- service.
- document:
- Required. The document to create. ``name`` must not be set.
- mask:
- The fields to return. If not set, returns all fields. If the
- document has a field that is not present in this mask, that
- field will not be returned in the response.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CreateDocumentRequest)
- ),
-)
-_sym_db.RegisterMessage(CreateDocumentRequest)
-
-UpdateDocumentRequest = _reflection.GeneratedProtocolMessageType(
- "UpdateDocumentRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPDATEDOCUMENTREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument].
-
-
- Attributes:
- document:
- Required. The updated document. Creates the document if it
- does not already exist.
- update_mask:
- The fields to update. None of the field paths in the mask may
- contain a reserved name. If the document exists on the server
- and has fields not referenced in the mask, they are left
- unchanged. Fields referenced in the mask, but not present in
- the input document, are deleted from the document on the
- server.
- mask:
- The fields to return. If not set, returns all fields. If the
- document has a field that is not present in this mask, that
- field will not be returned in the response.
- current_document:
- An optional precondition on the document. The request will
- fail if this is set and not met by the target document.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.UpdateDocumentRequest)
- ),
-)
-_sym_db.RegisterMessage(UpdateDocumentRequest)
-
-DeleteDocumentRequest = _reflection.GeneratedProtocolMessageType(
- "DeleteDocumentRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DELETEDOCUMENTREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument].
-
-
- Attributes:
- name:
- Required. The resource name of the Document to delete. In the
- format: ``projects/{project_id}/databases/{database_id}/docume
- nts/{document_path}``.
- current_document:
- An optional precondition on the document. The request will
- fail if this is set and not met by the target document.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DeleteDocumentRequest)
- ),
-)
-_sym_db.RegisterMessage(DeleteDocumentRequest)
-
-BatchGetDocumentsRequest = _reflection.GeneratedProtocolMessageType(
- "BatchGetDocumentsRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_BATCHGETDOCUMENTSREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments].
-
-
- Attributes:
- database:
- Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- documents:
- The names of the documents to retrieve. In the format: ``proje
- cts/{project_id}/databases/{database_id}/documents/{document_p
- ath}``. The request will fail if any of the document is not a
- child resource of the given ``database``. Duplicate names will
- be elided.
- mask:
- The fields to return. If not set, returns all fields. If a
- document has a field that is not present in this mask, that
- field will not be returned in the response.
- consistency_selector:
- The consistency mode for this transaction. If not set,
- defaults to strong consistency.
- transaction:
- Reads documents in a transaction.
- new_transaction:
- Starts a new transaction and reads the documents. Defaults to
- a read-only transaction. The new transaction ID will be
- returned as the first response in the stream.
- read_time:
- Reads documents as they were at the given time. This may not
- be older than 60 seconds.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BatchGetDocumentsRequest)
- ),
-)
-_sym_db.RegisterMessage(BatchGetDocumentsRequest)
-
-BatchGetDocumentsResponse = _reflection.GeneratedProtocolMessageType(
- "BatchGetDocumentsResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_BATCHGETDOCUMENTSRESPONSE,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The streamed response for
- [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments].
-
-
- Attributes:
- result:
- A single result. This can be empty if the server is just
- returning a transaction.
- found:
- A document that was requested.
- missing:
- A document name that was requested but does not exist. In the
- format: ``projects/{project_id}/databases/{database_id}/docume
- nts/{document_path}``.
- transaction:
- The transaction that was started as part of this request. Will
- only be set in the first response, and only if [BatchGetDocume
- ntsRequest.new\_transaction][google.firestore.v1beta1.BatchGet
- DocumentsRequest.new\_transaction] was set in the request.
- read_time:
- The time at which the document was read. This may be
- monotically increasing, in this case the previous documents in
- the result stream are guaranteed not to have changed between
- their read\_time and this one.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BatchGetDocumentsResponse)
- ),
-)
-_sym_db.RegisterMessage(BatchGetDocumentsResponse)
-
-BeginTransactionRequest = _reflection.GeneratedProtocolMessageType(
- "BeginTransactionRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_BEGINTRANSACTIONREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction].
-
-
- Attributes:
- database:
- Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- options:
- The options for the transaction. Defaults to a read-write
- transaction.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BeginTransactionRequest)
- ),
-)
-_sym_db.RegisterMessage(BeginTransactionRequest)
-
-BeginTransactionResponse = _reflection.GeneratedProtocolMessageType(
- "BeginTransactionResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_BEGINTRANSACTIONRESPONSE,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The response for
- [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction].
-
-
- Attributes:
- transaction:
- The transaction that was started.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BeginTransactionResponse)
- ),
-)
-_sym_db.RegisterMessage(BeginTransactionResponse)
-
-CommitRequest = _reflection.GeneratedProtocolMessageType(
- "CommitRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_COMMITREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit].
-
-
- Attributes:
- database:
- Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- writes:
- The writes to apply. Always executed atomically and in order.
- transaction:
- If set, applies all writes in this transaction, and commits
- it.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CommitRequest)
- ),
-)
-_sym_db.RegisterMessage(CommitRequest)
-
-CommitResponse = _reflection.GeneratedProtocolMessageType(
- "CommitResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_COMMITRESPONSE,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The response for
- [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit].
-
-
- Attributes:
- write_results:
- The result of applying the writes. This i-th write result
- corresponds to the i-th write in the request.
- commit_time:
- The time at which the commit occurred.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CommitResponse)
- ),
-)
-_sym_db.RegisterMessage(CommitResponse)
-
-RollbackRequest = _reflection.GeneratedProtocolMessageType(
- "RollbackRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_ROLLBACKREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback].
-
-
- Attributes:
- database:
- Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- transaction:
- Required. The transaction to roll back.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RollbackRequest)
- ),
-)
-_sym_db.RegisterMessage(RollbackRequest)
-
-RunQueryRequest = _reflection.GeneratedProtocolMessageType(
- "RunQueryRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_RUNQUERYREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery].
-
-
- Attributes:
- parent:
- Required. The parent resource name. In the format:
- ``projects/{project_id}/databases/{database_id}/documents`` or
- ``projects/{project_id}/databases/{database_id}/documents/{doc
- ument_path}``. For example: ``projects/my-
- project/databases/my-database/documents`` or ``projects/my-
- project/databases/my-database/documents/chatrooms/my-
- chatroom``
- query_type:
- The query to run.
- structured_query:
- A structured query.
- consistency_selector:
- The consistency mode for this transaction. If not set,
- defaults to strong consistency.
- transaction:
- Reads documents in a transaction.
- new_transaction:
- Starts a new transaction and reads the documents. Defaults to
- a read-only transaction. The new transaction ID will be
- returned as the first response in the stream.
- read_time:
- Reads documents as they were at the given time. This may not
- be older than 60 seconds.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RunQueryRequest)
- ),
-)
-_sym_db.RegisterMessage(RunQueryRequest)
-
-RunQueryResponse = _reflection.GeneratedProtocolMessageType(
- "RunQueryResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_RUNQUERYRESPONSE,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The response for
- [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery].
-
-
- Attributes:
- transaction:
- The transaction that was started as part of this request. Can
- only be set in the first response, and only if [RunQueryReques
- t.new\_transaction][google.firestore.v1beta1.RunQueryRequest.n
- ew\_transaction] was set in the request. If set, no other
- fields will be set in this response.
- document:
- A query result. Not set when reporting partial progress.
- read_time:
- The time at which the document was read. This may be
- monotonically increasing; in this case, the previous documents
- in the result stream are guaranteed not to have changed
- between their ``read_time`` and this one. If the query
- returns no results, a response with ``read_time`` and no
- ``document`` will be sent, and this represents the time at
- which the query was run.
- skipped_results:
- The number of results that have been skipped due to an offset
- between the last response and the current response.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RunQueryResponse)
- ),
-)
-_sym_db.RegisterMessage(RunQueryResponse)
-
-WriteRequest = _reflection.GeneratedProtocolMessageType(
- "WriteRequest",
- (_message.Message,),
- dict(
- LabelsEntry=_reflection.GeneratedProtocolMessageType(
- "LabelsEntry",
- (_message.Message,),
- dict(
- DESCRIPTOR=_WRITEREQUEST_LABELSENTRY,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2"
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteRequest.LabelsEntry)
- ),
- ),
- DESCRIPTOR=_WRITEREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.Write][google.firestore.v1beta1.Firestore.Write].
-
- The first request creates a stream, or resumes an existing one from a
- token.
-
- When creating a new stream, the server replies with a response
- containing only an ID and a token, to use in the next request.
-
- When resuming a stream, the server first streams any responses later
- than the given token, then a response containing only an up-to-date
- token, to use in the next request.
-
-
- Attributes:
- database:
- Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``. This is
- only required in the first message.
- stream_id:
- The ID of the write stream to resume. This may only be set in
- the first message. When left empty, a new write stream will be
- created.
- writes:
- The writes to apply. Always executed atomically and in order.
- This must be empty on the first request. This may be empty on
- the last request. This must not be empty on all other
- requests.
- stream_token:
- A stream token that was previously sent by the server. The
- client should set this field to the token from the most recent
- [WriteResponse][google.firestore.v1beta1.WriteResponse] it has
- received. This acknowledges that the client has received
- responses up to this token. After sending this token, earlier
- tokens may not be used anymore. The server may close the
- stream if there are too many unacknowledged responses. Leave
- this field unset when creating a new stream. To resume a
- stream at a specific point, set this field and the
- ``stream_id`` field. Leave this field unset when creating a
- new stream.
- labels:
- Labels associated with this write request.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteRequest)
- ),
-)
-_sym_db.RegisterMessage(WriteRequest)
-_sym_db.RegisterMessage(WriteRequest.LabelsEntry)
-
-WriteResponse = _reflection.GeneratedProtocolMessageType(
- "WriteResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_WRITERESPONSE,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The response for
- [Firestore.Write][google.firestore.v1beta1.Firestore.Write].
-
-
- Attributes:
- stream_id:
- The ID of the stream. Only set on the first message, when a
- new stream was created.
- stream_token:
- A token that represents the position of this response in the
- stream. This can be used by a client to resume the stream at
- this point. This field is always set.
- write_results:
- The result of applying the writes. This i-th write result
- corresponds to the i-th write in the request.
- commit_time:
- The time at which the commit occurred.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteResponse)
- ),
-)
-_sym_db.RegisterMessage(WriteResponse)
-
-ListenRequest = _reflection.GeneratedProtocolMessageType(
- "ListenRequest",
- (_message.Message,),
- dict(
- LabelsEntry=_reflection.GeneratedProtocolMessageType(
- "LabelsEntry",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTENREQUEST_LABELSENTRY,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2"
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenRequest.LabelsEntry)
- ),
- ),
- DESCRIPTOR=_LISTENREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""A request for
- [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]
-
-
- Attributes:
- database:
- Required. The database name. In the format:
- ``projects/{project_id}/databases/{database_id}``.
- target_change:
- The supported target changes.
- add_target:
- A target to add to this stream.
- remove_target:
- The ID of a target to remove from this stream.
- labels:
- Labels associated with this target change.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenRequest)
- ),
-)
-_sym_db.RegisterMessage(ListenRequest)
-_sym_db.RegisterMessage(ListenRequest.LabelsEntry)
-
-ListenResponse = _reflection.GeneratedProtocolMessageType(
- "ListenResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTENRESPONSE,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The response for
- [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen].
-
-
- Attributes:
- response_type:
- The supported responses.
- target_change:
- Targets have changed.
- document_change:
- A [Document][google.firestore.v1beta1.Document] has changed.
- document_delete:
- A [Document][google.firestore.v1beta1.Document] has been
- deleted.
- document_remove:
- A [Document][google.firestore.v1beta1.Document] has been
- removed from a target (because it is no longer relevant to
- that target).
- filter:
- A filter to apply to the set of documents previously returned
- for the given target. Returned when documents may have been
- removed from the given target, but the exact documents are
- unknown.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenResponse)
- ),
-)
-_sym_db.RegisterMessage(ListenResponse)
-
-Target = _reflection.GeneratedProtocolMessageType(
- "Target",
- (_message.Message,),
- dict(
- DocumentsTarget=_reflection.GeneratedProtocolMessageType(
- "DocumentsTarget",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TARGET_DOCUMENTSTARGET,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""A target specified by a set of documents names.
-
-
- Attributes:
- documents:
- The names of the documents to retrieve. In the format: ``proje
- cts/{project_id}/databases/{database_id}/documents/{document_p
- ath}``. The request will fail if any of the document is not a
- child resource of the given ``database``. Duplicate names will
- be elided.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target.DocumentsTarget)
- ),
- ),
- QueryTarget=_reflection.GeneratedProtocolMessageType(
- "QueryTarget",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TARGET_QUERYTARGET,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""A target specified by a query.
-
-
- Attributes:
- parent:
- The parent resource name. In the format:
- ``projects/{project_id}/databases/{database_id}/documents`` or
- ``projects/{project_id}/databases/{database_id}/documents/{doc
- ument_path}``. For example: ``projects/my-
- project/databases/my-database/documents`` or ``projects/my-
- project/databases/my-database/documents/chatrooms/my-
- chatroom``
- query_type:
- The query to run.
- structured_query:
- A structured query.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target.QueryTarget)
- ),
- ),
- DESCRIPTOR=_TARGET,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""A specification of a set of documents to listen to.
-
-
- Attributes:
- target_type:
- The type of target to listen to.
- query:
- A target specified by a query.
- documents:
- A target specified by a set of document names.
- resume_type:
- When to start listening. If not specified, all matching
- Documents are returned before any subsequent changes.
- resume_token:
- A resume token from a prior
- [TargetChange][google.firestore.v1beta1.TargetChange] for an
- identical target. Using a resume token with a different
- target is unsupported and may fail.
- read_time:
- Start listening after a specific ``read_time``. The client
- must know the state of matching documents at this time.
- target_id:
- The target ID that identifies the target on the stream. Must
- be a positive number and non-zero.
- once:
- If the target should be removed once it is current and
- consistent.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target)
- ),
-)
-_sym_db.RegisterMessage(Target)
-_sym_db.RegisterMessage(Target.DocumentsTarget)
-_sym_db.RegisterMessage(Target.QueryTarget)
-
-TargetChange = _reflection.GeneratedProtocolMessageType(
- "TargetChange",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TARGETCHANGE,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""Targets being watched have changed.
-
-
- Attributes:
- target_change_type:
- The type of change that occurred.
- target_ids:
- The target IDs of targets that have changed. If empty, the
- change applies to all targets. The order of the target IDs is
- not defined.
- cause:
- The error that resulted in this change, if applicable.
- resume_token:
- A token that can be used to resume the stream for the given
- ``target_ids``, or all targets if ``target_ids`` is empty.
- Not set on every target change.
- read_time:
- The consistent ``read_time`` for the given ``target_ids``
- (omitted when the target\_ids are not at a consistent
- snapshot). The stream is guaranteed to send a ``read_time``
- with ``target_ids`` empty whenever the entire stream reaches a
- new consistent snapshot. ADD, CURRENT, and RESET messages are
- guaranteed to (eventually) result in a new consistent snapshot
- (while NO\_CHANGE and REMOVE messages are not). For a given
- stream, ``read_time`` is guaranteed to be monotonically
- increasing.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TargetChange)
- ),
-)
-_sym_db.RegisterMessage(TargetChange)
-
-ListCollectionIdsRequest = _reflection.GeneratedProtocolMessageType(
- "ListCollectionIdsRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTCOLLECTIONIDSREQUEST,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The request for
- [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds].
-
-
- Attributes:
- parent:
- Required. The parent document. In the format: ``projects/{proj
- ect_id}/databases/{database_id}/documents/{document_path}``.
- For example: ``projects/my-project/databases/my-
- database/documents/chatrooms/my-chatroom``
- page_size:
- The maximum number of results to return.
- page_token:
- A page token. Must be a value from [ListCollectionIdsResponse]
- [google.firestore.v1beta1.ListCollectionIdsResponse].
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListCollectionIdsRequest)
- ),
-)
-_sym_db.RegisterMessage(ListCollectionIdsRequest)
-
-ListCollectionIdsResponse = _reflection.GeneratedProtocolMessageType(
- "ListCollectionIdsResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTCOLLECTIONIDSRESPONSE,
- __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2",
- __doc__="""The response from
- [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds].
-
-
- Attributes:
- collection_ids:
- The collection ids.
- next_page_token:
- A page token that may be used to continue the list.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListCollectionIdsResponse)
- ),
-)
-_sym_db.RegisterMessage(ListCollectionIdsResponse)
-
-
-DESCRIPTOR._options = None
-_GETDOCUMENTREQUEST.fields_by_name["name"]._options = None
-_LISTDOCUMENTSREQUEST.fields_by_name["parent"]._options = None
-_LISTDOCUMENTSREQUEST.fields_by_name["collection_id"]._options = None
-_CREATEDOCUMENTREQUEST.fields_by_name["parent"]._options = None
-_CREATEDOCUMENTREQUEST.fields_by_name["collection_id"]._options = None
-_CREATEDOCUMENTREQUEST.fields_by_name["document"]._options = None
-_UPDATEDOCUMENTREQUEST.fields_by_name["document"]._options = None
-_DELETEDOCUMENTREQUEST.fields_by_name["name"]._options = None
-_BATCHGETDOCUMENTSREQUEST.fields_by_name["database"]._options = None
-_BEGINTRANSACTIONREQUEST.fields_by_name["database"]._options = None
-_COMMITREQUEST.fields_by_name["database"]._options = None
-_ROLLBACKREQUEST.fields_by_name["database"]._options = None
-_ROLLBACKREQUEST.fields_by_name["transaction"]._options = None
-_RUNQUERYREQUEST.fields_by_name["parent"]._options = None
-_WRITEREQUEST_LABELSENTRY._options = None
-_WRITEREQUEST.fields_by_name["database"]._options = None
-_LISTENREQUEST_LABELSENTRY._options = None
-_LISTENREQUEST.fields_by_name["database"]._options = None
-_LISTCOLLECTIONIDSREQUEST.fields_by_name["parent"]._options = None
-
-_FIRESTORE = _descriptor.ServiceDescriptor(
- name="Firestore",
- full_name="google.firestore.v1beta1.Firestore",
- file=DESCRIPTOR,
- index=0,
- serialized_options=_b(
- "\312A\030firestore.googleapis.com\322AXhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastore"
- ),
- serialized_start=4999,
- serialized_end=7714,
- methods=[
- _descriptor.MethodDescriptor(
- name="GetDocument",
- full_name="google.firestore.v1beta1.Firestore.GetDocument",
- index=0,
- containing_service=None,
- input_type=_GETDOCUMENTREQUEST,
- output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT,
- serialized_options=_b(
- "\202\323\344\223\0027\0225/v1beta1/{name=projects/*/databases/*/documents/*/**}"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ListDocuments",
- full_name="google.firestore.v1beta1.Firestore.ListDocuments",
- index=1,
- containing_service=None,
- input_type=_LISTDOCUMENTSREQUEST,
- output_type=_LISTDOCUMENTSRESPONSE,
- serialized_options=_b(
- "\202\323\344\223\002I\022G/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="CreateDocument",
- full_name="google.firestore.v1beta1.Firestore.CreateDocument",
- index=2,
- containing_service=None,
- input_type=_CREATEDOCUMENTREQUEST,
- output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT,
- serialized_options=_b(
- '\202\323\344\223\002Q"E/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\010document'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="UpdateDocument",
- full_name="google.firestore.v1beta1.Firestore.UpdateDocument",
- index=3,
- containing_service=None,
- input_type=_UPDATEDOCUMENTREQUEST,
- output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT,
- serialized_options=_b(
- "\202\323\344\223\002J2>/v1beta1/{document.name=projects/*/databases/*/documents/*/**}:\010document\332A\024document,update_mask"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="DeleteDocument",
- full_name="google.firestore.v1beta1.Firestore.DeleteDocument",
- index=4,
- containing_service=None,
- input_type=_DELETEDOCUMENTREQUEST,
- output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- serialized_options=_b(
- "\202\323\344\223\0027*5/v1beta1/{name=projects/*/databases/*/documents/*/**}\332A\004name"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="BatchGetDocuments",
- full_name="google.firestore.v1beta1.Firestore.BatchGetDocuments",
- index=5,
- containing_service=None,
- input_type=_BATCHGETDOCUMENTSREQUEST,
- output_type=_BATCHGETDOCUMENTSRESPONSE,
- serialized_options=_b(
- '\202\323\344\223\002B"=/v1beta1/{database=projects/*/databases/*}/documents:batchGet:\001*'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="BeginTransaction",
- full_name="google.firestore.v1beta1.Firestore.BeginTransaction",
- index=6,
- containing_service=None,
- input_type=_BEGINTRANSACTIONREQUEST,
- output_type=_BEGINTRANSACTIONRESPONSE,
- serialized_options=_b(
- '\202\323\344\223\002J"E/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction:\001*\332A\010database'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="Commit",
- full_name="google.firestore.v1beta1.Firestore.Commit",
- index=7,
- containing_service=None,
- input_type=_COMMITREQUEST,
- output_type=_COMMITRESPONSE,
- serialized_options=_b(
- '\202\323\344\223\002@";/v1beta1/{database=projects/*/databases/*}/documents:commit:\001*\332A\017database,writes'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="Rollback",
- full_name="google.firestore.v1beta1.Firestore.Rollback",
- index=8,
- containing_service=None,
- input_type=_ROLLBACKREQUEST,
- output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- serialized_options=_b(
- '\202\323\344\223\002B"=/v1beta1/{database=projects/*/databases/*}/documents:rollback:\001*\332A\024database,transaction'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="RunQuery",
- full_name="google.firestore.v1beta1.Firestore.RunQuery",
- index=9,
- containing_service=None,
- input_type=_RUNQUERYREQUEST,
- output_type=_RUNQUERYRESPONSE,
- serialized_options=_b(
- '\202\323\344\223\002\207\001";/v1beta1/{parent=projects/*/databases/*/documents}:runQuery:\001*ZE"@/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\001*'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="Write",
- full_name="google.firestore.v1beta1.Firestore.Write",
- index=10,
- containing_service=None,
- input_type=_WRITEREQUEST,
- output_type=_WRITERESPONSE,
- serialized_options=_b(
- '\202\323\344\223\002?":/v1beta1/{database=projects/*/databases/*}/documents:write:\001*'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="Listen",
- full_name="google.firestore.v1beta1.Firestore.Listen",
- index=11,
- containing_service=None,
- input_type=_LISTENREQUEST,
- output_type=_LISTENRESPONSE,
- serialized_options=_b(
- '\202\323\344\223\002@";/v1beta1/{database=projects/*/databases/*}/documents:listen:\001*'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ListCollectionIds",
- full_name="google.firestore.v1beta1.Firestore.ListCollectionIds",
- index=12,
- containing_service=None,
- input_type=_LISTCOLLECTIONIDSREQUEST,
- output_type=_LISTCOLLECTIONIDSRESPONSE,
- serialized_options=_b(
- '\202\323\344\223\002\231\001"D/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds:\001*ZN"I/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\001*\332A\006parent'
- ),
- ),
- ],
-)
-_sym_db.RegisterServiceDescriptor(_FIRESTORE)
-
-DESCRIPTOR.services_by_name["Firestore"] = _FIRESTORE
-
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py b/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py
deleted file mode 100644
index cf23b20c38..0000000000
--- a/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py
+++ /dev/null
@@ -1,294 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
-from google.cloud.firestore_v1beta1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
-)
-from google.cloud.firestore_v1beta1.proto import (
- firestore_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2,
-)
-from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
-
-
-class FirestoreStub(object):
- """Specification of the Firestore API.
-
- The Cloud Firestore service.
-
- This service exposes several types of comparable timestamps:
-
- * `create_time` - The time at which a document was created. Changes only
- when a document is deleted, then re-created. Increases in a strict
- monotonic fashion.
- * `update_time` - The time at which a document was last updated. Changes
- every time a document is modified. Does not change when a write results
- in no modifications. Increases in a strict monotonic fashion.
- * `read_time` - The time at which a particular state was observed. Used
- to denote a consistent snapshot of the database or the time at which a
- Document was observed to not exist.
- * `commit_time` - The time at which the writes in a transaction were
- committed. Any read with an equal or greater `read_time` is guaranteed
- to see the effects of the transaction.
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.GetDocument = channel.unary_unary(
- "/google.firestore.v1beta1.Firestore/GetDocument",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.GetDocumentRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString,
- )
- self.ListDocuments = channel.unary_unary(
- "/google.firestore.v1beta1.Firestore/ListDocuments",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsResponse.FromString,
- )
- self.CreateDocument = channel.unary_unary(
- "/google.firestore.v1beta1.Firestore/CreateDocument",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CreateDocumentRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString,
- )
- self.UpdateDocument = channel.unary_unary(
- "/google.firestore.v1beta1.Firestore/UpdateDocument",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.UpdateDocumentRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString,
- )
- self.DeleteDocument = channel.unary_unary(
- "/google.firestore.v1beta1.Firestore/DeleteDocument",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DeleteDocumentRequest.SerializeToString,
- response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
- )
- self.BatchGetDocuments = channel.unary_stream(
- "/google.firestore.v1beta1.Firestore/BatchGetDocuments",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsResponse.FromString,
- )
- self.BeginTransaction = channel.unary_unary(
- "/google.firestore.v1beta1.Firestore/BeginTransaction",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionResponse.FromString,
- )
- self.Commit = channel.unary_unary(
- "/google.firestore.v1beta1.Firestore/Commit",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitResponse.FromString,
- )
- self.Rollback = channel.unary_unary(
- "/google.firestore.v1beta1.Firestore/Rollback",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RollbackRequest.SerializeToString,
- response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
- )
- self.RunQuery = channel.unary_stream(
- "/google.firestore.v1beta1.Firestore/RunQuery",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryResponse.FromString,
- )
- self.Write = channel.stream_stream(
- "/google.firestore.v1beta1.Firestore/Write",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteResponse.FromString,
- )
- self.Listen = channel.stream_stream(
- "/google.firestore.v1beta1.Firestore/Listen",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenResponse.FromString,
- )
- self.ListCollectionIds = channel.unary_unary(
- "/google.firestore.v1beta1.Firestore/ListCollectionIds",
- request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsResponse.FromString,
- )
-
-
-class FirestoreServicer(object):
- """Specification of the Firestore API.
-
- The Cloud Firestore service.
-
- This service exposes several types of comparable timestamps:
-
- * `create_time` - The time at which a document was created. Changes only
- when a document is deleted, then re-created. Increases in a strict
- monotonic fashion.
- * `update_time` - The time at which a document was last updated. Changes
- every time a document is modified. Does not change when a write results
- in no modifications. Increases in a strict monotonic fashion.
- * `read_time` - The time at which a particular state was observed. Used
- to denote a consistent snapshot of the database or the time at which a
- Document was observed to not exist.
- * `commit_time` - The time at which the writes in a transaction were
- committed. Any read with an equal or greater `read_time` is guaranteed
- to see the effects of the transaction.
- """
-
- def GetDocument(self, request, context):
- """Gets a single document.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ListDocuments(self, request, context):
- """Lists documents.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def CreateDocument(self, request, context):
- """Creates a new document.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def UpdateDocument(self, request, context):
- """Updates or inserts a document.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def DeleteDocument(self, request, context):
- """Deletes a document.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def BatchGetDocuments(self, request, context):
- """Gets multiple documents.
-
- Documents returned by this method are not guaranteed to be returned in the
- same order that they were requested.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def BeginTransaction(self, request, context):
- """Starts a new transaction.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def Commit(self, request, context):
- """Commits a transaction, while optionally updating documents.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def Rollback(self, request, context):
- """Rolls back a transaction.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def RunQuery(self, request, context):
- """Runs a query.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def Write(self, request_iterator, context):
- """Streams batches of document updates and deletes, in order.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def Listen(self, request_iterator, context):
- """Listens to changes.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ListCollectionIds(self, request, context):
- """Lists all the collection IDs underneath a document.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
-
-def add_FirestoreServicer_to_server(servicer, server):
- rpc_method_handlers = {
- "GetDocument": grpc.unary_unary_rpc_method_handler(
- servicer.GetDocument,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.GetDocumentRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString,
- ),
- "ListDocuments": grpc.unary_unary_rpc_method_handler(
- servicer.ListDocuments,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsResponse.SerializeToString,
- ),
- "CreateDocument": grpc.unary_unary_rpc_method_handler(
- servicer.CreateDocument,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CreateDocumentRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString,
- ),
- "UpdateDocument": grpc.unary_unary_rpc_method_handler(
- servicer.UpdateDocument,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.UpdateDocumentRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString,
- ),
- "DeleteDocument": grpc.unary_unary_rpc_method_handler(
- servicer.DeleteDocument,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DeleteDocumentRequest.FromString,
- response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
- ),
- "BatchGetDocuments": grpc.unary_stream_rpc_method_handler(
- servicer.BatchGetDocuments,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsResponse.SerializeToString,
- ),
- "BeginTransaction": grpc.unary_unary_rpc_method_handler(
- servicer.BeginTransaction,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionResponse.SerializeToString,
- ),
- "Commit": grpc.unary_unary_rpc_method_handler(
- servicer.Commit,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitResponse.SerializeToString,
- ),
- "Rollback": grpc.unary_unary_rpc_method_handler(
- servicer.Rollback,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RollbackRequest.FromString,
- response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
- ),
- "RunQuery": grpc.unary_stream_rpc_method_handler(
- servicer.RunQuery,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryResponse.SerializeToString,
- ),
- "Write": grpc.stream_stream_rpc_method_handler(
- servicer.Write,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteResponse.SerializeToString,
- ),
- "Listen": grpc.stream_stream_rpc_method_handler(
- servicer.Listen,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenResponse.SerializeToString,
- ),
- "ListCollectionIds": grpc.unary_unary_rpc_method_handler(
- servicer.ListCollectionIds,
- request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsRequest.FromString,
- response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsResponse.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- "google.firestore.v1beta1.Firestore", rpc_method_handlers
- )
- server.add_generic_rpc_handlers((generic_handler,))
diff --git a/google/cloud/firestore_v1beta1/proto/index.proto b/google/cloud/firestore_v1beta1/proto/index.proto
deleted file mode 100644
index c5784e0eaa..0000000000
--- a/google/cloud/firestore_v1beta1/proto/index.proto
+++ /dev/null
@@ -1,102 +0,0 @@
-// Copyright 2018 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.admin.v1beta1;
-
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.Admin.V1Beta1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin";
-option java_multiple_files = true;
-option java_outer_classname = "IndexProto";
-option java_package = "com.google.firestore.admin.v1beta1";
-option objc_class_prefix = "GCFS";
-
-
-// A field of an index.
-message IndexField {
- // The mode determines how a field is indexed.
- enum Mode {
- // The mode is unspecified.
- MODE_UNSPECIFIED = 0;
-
- // The field's values are indexed so as to support sequencing in
- // ascending order and also query by <, >, <=, >=, and =.
- ASCENDING = 2;
-
- // The field's values are indexed so as to support sequencing in
- // descending order and also query by <, >, <=, >=, and =.
- DESCENDING = 3;
-
- // The field's array values are indexed so as to support membership using
- // ARRAY_CONTAINS queries.
- ARRAY_CONTAINS = 4;
- }
-
- // The path of the field. Must match the field path specification described
- // by [google.firestore.v1beta1.Document.fields][fields].
- // Special field path `__name__` may be used by itself or at the end of a
- // path. `__type__` may be used only at the end of path.
- string field_path = 1;
-
- // The field's mode.
- Mode mode = 2;
-}
-
-// An index definition.
-message Index {
- // The state of an index. During index creation, an index will be in the
- // `CREATING` state. If the index is created successfully, it will transition
- // to the `READY` state. If the index is not able to be created, it will
- // transition to the `ERROR` state.
- enum State {
- // The state is unspecified.
- STATE_UNSPECIFIED = 0;
-
- // The index is being created.
- // There is an active long-running operation for the index.
- // The index is updated when writing a document.
- // Some index data may exist.
- CREATING = 3;
-
- // The index is ready to be used.
- // The index is updated when writing a document.
- // The index is fully populated from all stored documents it applies to.
- READY = 2;
-
- // The index was being created, but something went wrong.
- // There is no active long-running operation for the index,
- // and the most recently finished long-running operation failed.
- // The index is not updated when writing a document.
- // Some index data may exist.
- ERROR = 5;
- }
-
- // The resource name of the index.
- // Output only.
- string name = 1;
-
- // The collection ID to which this index applies. Required.
- string collection_id = 2;
-
- // The fields to index.
- repeated IndexField fields = 3;
-
- // The state of the index.
- // Output only.
- State state = 6;
-}
diff --git a/google/cloud/firestore_v1beta1/proto/location.proto b/google/cloud/firestore_v1beta1/proto/location.proto
deleted file mode 100644
index db7e8544b7..0000000000
--- a/google/cloud/firestore_v1beta1/proto/location.proto
+++ /dev/null
@@ -1,34 +0,0 @@
-// Copyright 2018 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.admin.v1beta1;
-
-import "google/api/annotations.proto";
-import "google/type/latlng.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.Admin.V1Beta1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin";
-option java_multiple_files = true;
-option java_outer_classname = "LocationProto";
-option java_package = "com.google.firestore.admin.v1beta1";
-option objc_class_prefix = "GCFS";
-
-
-// The metadata message for [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata].
-message LocationMetadata {
-
-}
diff --git a/google/cloud/firestore_v1beta1/proto/operation.proto b/google/cloud/firestore_v1beta1/proto/operation.proto
deleted file mode 100644
index c2a1b001e6..0000000000
--- a/google/cloud/firestore_v1beta1/proto/operation.proto
+++ /dev/null
@@ -1,203 +0,0 @@
-// Copyright 2018 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.admin.v1beta2;
-
-import "google/api/annotations.proto";
-import "google/firestore/admin/v1beta2/index.proto";
-import "google/protobuf/timestamp.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.Admin.V1Beta2";
-option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1beta2;admin";
-option java_multiple_files = true;
-option java_outer_classname = "OperationProto";
-option java_package = "com.google.firestore.admin.v1beta2";
-option objc_class_prefix = "GCFS";
-
-
-// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
-// [FirestoreAdmin.CreateIndex][google.firestore.admin.v1beta2.FirestoreAdmin.CreateIndex].
-message IndexOperationMetadata {
- // The time this operation started.
- google.protobuf.Timestamp start_time = 1;
-
- // The time this operation completed. Will be unset if operation still in
- // progress.
- google.protobuf.Timestamp end_time = 2;
-
- // The index resource that this operation is acting on. For example:
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
- string index = 3;
-
- // The state of the operation.
- OperationState state = 4;
-
- // The progress, in documents, of this operation.
- Progress progress_documents = 5;
-
- // The progress, in bytes, of this operation.
- Progress progress_bytes = 6;
-}
-
-// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
-// [FirestoreAdmin.UpdateField][google.firestore.admin.v1beta2.FirestoreAdmin.UpdateField].
-message FieldOperationMetadata {
- // Information about an index configuration change.
- message IndexConfigDelta {
- // Specifies how the index is changing.
- enum ChangeType {
- // The type of change is not specified or known.
- CHANGE_TYPE_UNSPECIFIED = 0;
-
- // The single field index is being added.
- ADD = 1;
-
- // The single field index is being removed.
- REMOVE = 2;
- }
-
- // Specifies how the index is changing.
- ChangeType change_type = 1;
-
- // The index being changed.
- Index index = 2;
- }
-
- // The time this operation started.
- google.protobuf.Timestamp start_time = 1;
-
- // The time this operation completed. Will be unset if operation still in
- // progress.
- google.protobuf.Timestamp end_time = 2;
-
- // The field resource that this operation is acting on. For example:
- // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
- string field = 3;
-
- // A list of [IndexConfigDelta][google.firestore.admin.v1beta2.FieldOperationMetadata.IndexConfigDelta], which describe the intent of this
- // operation.
- repeated IndexConfigDelta index_config_deltas = 4;
-
- // The state of the operation.
- OperationState state = 5;
-
- // The progress, in documents, of this operation.
- Progress document_progress = 6;
-
- // The progress, in bytes, of this operation.
- Progress bytes_progress = 7;
-}
-
-// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
-// [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1beta2.FirestoreAdmin.ExportDocuments].
-message ExportDocumentsMetadata {
- // The time this operation started.
- google.protobuf.Timestamp start_time = 1;
-
- // The time this operation completed. Will be unset if operation still in
- // progress.
- google.protobuf.Timestamp end_time = 2;
-
- // The state of the export operation.
- OperationState operation_state = 3;
-
- // The progress, in documents, of this operation.
- Progress progress_documents = 4;
-
- // The progress, in bytes, of this operation.
- Progress progress_bytes = 5;
-
- // Which collection ids are being exported.
- repeated string collection_ids = 6;
-
- // Where the entities are being exported to.
- string output_uri_prefix = 7;
-}
-
-// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from
-// [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1beta2.FirestoreAdmin.ImportDocuments].
-message ImportDocumentsMetadata {
- // The time this operation started.
- google.protobuf.Timestamp start_time = 1;
-
- // The time this operation completed. Will be unset if operation still in
- // progress.
- google.protobuf.Timestamp end_time = 2;
-
- // The state of the import operation.
- OperationState operation_state = 3;
-
- // The progress, in documents, of this operation.
- Progress progress_documents = 4;
-
- // The progress, in bytes, of this operation.
- Progress progress_bytes = 5;
-
- // Which collection ids are being imported.
- repeated string collection_ids = 6;
-
- // The location of the documents being imported.
- string input_uri_prefix = 7;
-}
-
-// Returned in the [google.longrunning.Operation][google.longrunning.Operation] response field.
-message ExportDocumentsResponse {
- // Location of the output files. This can be used to begin an import
- // into Cloud Firestore (this project or another project) after the operation
- // completes successfully.
- string output_uri_prefix = 1;
-}
-
-// Describes the progress of the operation.
-// Unit of work is generic and must be interpreted based on where [Progress][google.firestore.admin.v1beta2.Progress]
-// is used.
-message Progress {
- // The amount of work estimated.
- int64 estimated_work = 1;
-
- // The amount of work completed.
- int64 completed_work = 2;
-}
-
-// Describes the state of the operation.
-enum OperationState {
- // Unspecified.
- OPERATION_STATE_UNSPECIFIED = 0;
-
- // Request is being prepared for processing.
- INITIALIZING = 1;
-
- // Request is actively being processed.
- PROCESSING = 2;
-
- // Request is in the process of being cancelled after user called
- // google.longrunning.Operations.CancelOperation on the operation.
- CANCELLING = 3;
-
- // Request has been processed and is in its finalization stage.
- FINALIZING = 4;
-
- // Request has completed successfully.
- SUCCESSFUL = 5;
-
- // Request has finished being processed, but encountered an error.
- FAILED = 6;
-
- // Request has finished being cancelled after user called
- // google.longrunning.Operations.CancelOperation.
- CANCELLED = 7;
-}
diff --git a/google/cloud/firestore_v1beta1/proto/query.proto b/google/cloud/firestore_v1beta1/proto/query.proto
deleted file mode 100644
index 4f515fabe1..0000000000
--- a/google/cloud/firestore_v1beta1/proto/query.proto
+++ /dev/null
@@ -1,243 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.v1beta1;
-
-import "google/firestore/v1beta1/document.proto";
-import "google/protobuf/wrappers.proto";
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
-option java_multiple_files = true;
-option java_outer_classname = "QueryProto";
-option java_package = "com.google.firestore.v1beta1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
-
-// A Firestore query.
-message StructuredQuery {
- // A selection of a collection, such as `messages as m1`.
- message CollectionSelector {
- // The collection ID.
- // When set, selects only collections with this ID.
- string collection_id = 2;
-
- // When false, selects only collections that are immediate children of
- // the `parent` specified in the containing `RunQueryRequest`.
- // When true, selects all descendant collections.
- bool all_descendants = 3;
- }
-
- // A filter.
- message Filter {
- // The type of filter.
- oneof filter_type {
- // A composite filter.
- CompositeFilter composite_filter = 1;
-
- // A filter on a document field.
- FieldFilter field_filter = 2;
-
- // A filter that takes exactly one argument.
- UnaryFilter unary_filter = 3;
- }
- }
-
- // A filter that merges multiple other filters using the given operator.
- message CompositeFilter {
- // A composite filter operator.
- enum Operator {
- // Unspecified. This value must not be used.
- OPERATOR_UNSPECIFIED = 0;
-
- // The results are required to satisfy each of the combined filters.
- AND = 1;
- }
-
- // The operator for combining multiple filters.
- Operator op = 1;
-
- // The list of filters to combine.
- // Must contain at least one filter.
- repeated Filter filters = 2;
- }
-
- // A filter on a specific field.
- message FieldFilter {
- // A field filter operator.
- enum Operator {
- // Unspecified. This value must not be used.
- OPERATOR_UNSPECIFIED = 0;
-
- // Less than. Requires that the field come first in `order_by`.
- LESS_THAN = 1;
-
- // Less than or equal. Requires that the field come first in `order_by`.
- LESS_THAN_OR_EQUAL = 2;
-
- // Greater than. Requires that the field come first in `order_by`.
- GREATER_THAN = 3;
-
- // Greater than or equal. Requires that the field come first in
- // `order_by`.
- GREATER_THAN_OR_EQUAL = 4;
-
- // Equal.
- EQUAL = 5;
-
- // Contains. Requires that the field is an array.
- ARRAY_CONTAINS = 7;
-
- // In. Requires that `value` is a non-empty ArrayValue with at most 10
- // values.
- IN = 8;
-
- // Contains any. Requires that the field is an array and
- // `value` is a non-empty ArrayValue with at most 10 values.
- ARRAY_CONTAINS_ANY = 9;
- }
-
- // The field to filter by.
- FieldReference field = 1;
-
- // The operator to filter by.
- Operator op = 2;
-
- // The value to compare to.
- Value value = 3;
- }
-
- // A filter with a single operand.
- message UnaryFilter {
- // A unary operator.
- enum Operator {
- // Unspecified. This value must not be used.
- OPERATOR_UNSPECIFIED = 0;
-
- // Test if a field is equal to NaN.
- IS_NAN = 2;
-
- // Test if an expression evaluates to Null.
- IS_NULL = 3;
- }
-
- // The unary operator to apply.
- Operator op = 1;
-
- // The argument to the filter.
- oneof operand_type {
- // The field to which to apply the operator.
- FieldReference field = 2;
- }
- }
-
- // An order on a field.
- message Order {
- // The field to order by.
- FieldReference field = 1;
-
- // The direction to order by. Defaults to `ASCENDING`.
- Direction direction = 2;
- }
-
- // A reference to a field, such as `max(messages.time) as max_time`.
- message FieldReference {
- string field_path = 2;
- }
-
- // The projection of document's fields to return.
- message Projection {
- // The fields to return.
- //
- // If empty, all fields are returned. To only return the name
- // of the document, use `['__name__']`.
- repeated FieldReference fields = 2;
- }
-
- // A sort direction.
- enum Direction {
- // Unspecified.
- DIRECTION_UNSPECIFIED = 0;
-
- // Ascending.
- ASCENDING = 1;
-
- // Descending.
- DESCENDING = 2;
- }
-
- // The projection to return.
- Projection select = 1;
-
- // The collections to query.
- repeated CollectionSelector from = 2;
-
- // The filter to apply.
- Filter where = 3;
-
- // The order to apply to the query results.
- //
- // Firestore guarantees a stable ordering through the following rules:
- //
- // * Any field required to appear in `order_by`, that is not already
- // specified in `order_by`, is appended to the order in field name order
- // by default.
- // * If an order on `__name__` is not specified, it is appended by default.
- //
- // Fields are appended with the same sort direction as the last order
- // specified, or 'ASCENDING' if no order was specified. For example:
- //
- // * `SELECT * FROM Foo ORDER BY A` becomes
- // `SELECT * FROM Foo ORDER BY A, __name__`
- // * `SELECT * FROM Foo ORDER BY A DESC` becomes
- // `SELECT * FROM Foo ORDER BY A DESC, __name__ DESC`
- // * `SELECT * FROM Foo WHERE A > 1` becomes
- // `SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__`
- repeated Order order_by = 4;
-
- // A starting point for the query results.
- Cursor start_at = 7;
-
- // A end point for the query results.
- Cursor end_at = 8;
-
- // The number of results to skip.
- //
- // Applies before limit, but after all other constraints. Must be >= 0 if
- // specified.
- int32 offset = 6;
-
- // The maximum number of results to return.
- //
- // Applies after all other constraints.
- // Must be >= 0 if specified.
- google.protobuf.Int32Value limit = 5;
-}
-
-// A position in a query result set.
-message Cursor {
- // The values that represent a position, in the order they appear in
- // the order by clause of a query.
- //
- // Can contain fewer values than specified in the order by clause.
- repeated Value values = 1;
-
- // If the position is just before or just after the given values, relative
- // to the sort order defined by the query.
- bool before = 2;
-}
diff --git a/google/cloud/firestore_v1beta1/proto/query_pb2.py b/google/cloud/firestore_v1beta1/proto/query_pb2.py
deleted file mode 100644
index 154aab0d20..0000000000
--- a/google/cloud/firestore_v1beta1/proto/query_pb2.py
+++ /dev/null
@@ -1,1204 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1beta1/proto/query.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.cloud.firestore_v1beta1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
-)
-from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1beta1/proto/query.proto",
- package="google.firestore.v1beta1",
- syntax="proto3",
- serialized_options=_b(
- "\n\034com.google.firestore.v1beta1B\nQueryProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1"
- ),
- serialized_pb=_b(
- '\n0google/cloud/firestore_v1beta1/proto/query.proto\x12\x18google.firestore.v1beta1\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\xd9\x0f\n\x0fStructuredQuery\x12\x44\n\x06select\x18\x01 \x01(\x0b\x32\x34.google.firestore.v1beta1.StructuredQuery.Projection\x12J\n\x04\x66rom\x18\x02 \x03(\x0b\x32<.google.firestore.v1beta1.StructuredQuery.CollectionSelector\x12?\n\x05where\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\x12\x41\n\x08order_by\x18\x04 \x03(\x0b\x32/.google.firestore.v1beta1.StructuredQuery.Order\x12\x32\n\x08start_at\x18\x07 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x30\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12*\n\x05limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x1a\x44\n\x12\x43ollectionSelector\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x17\n\x0f\x61ll_descendants\x18\x03 \x01(\x08\x1a\x8c\x02\n\x06\x46ilter\x12U\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\x39.google.firestore.v1beta1.StructuredQuery.CompositeFilterH\x00\x12M\n\x0c\x66ield_filter\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.FieldFilterH\x00\x12M\n\x0cunary_filter\x18\x03 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.UnaryFilterH\x00\x42\r\n\x0b\x66ilter_type\x1a\xd3\x01\n\x0f\x43ompositeFilter\x12N\n\x02op\x18\x01 \x01(\x0e\x32\x42.google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator\x12\x41\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x1a\x8c\x03\n\x0b\x46ieldFilter\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12J\n\x02op\x18\x02 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator\x12.\n\x05value\x18\x03 
\x01(\x0b\x32\x1f.google.firestore.v1beta1.Value"\xb7\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x12\n\x0e\x41RRAY_CONTAINS\x10\x07\x12\x06\n\x02IN\x10\x08\x12\x16\n\x12\x41RRAY_CONTAINS_ANY\x10\t\x1a\xf3\x01\n\x0bUnaryFilter\x12J\n\x02op\x18\x01 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator\x12I\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReferenceH\x00"=\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\n\n\x06IS_NAN\x10\x02\x12\x0b\n\x07IS_NULL\x10\x03\x42\x0e\n\x0coperand_type\x1a\x98\x01\n\x05Order\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12\x46\n\tdirection\x18\x02 \x01(\x0e\x32\x33.google.firestore.v1beta1.StructuredQuery.Direction\x1a$\n\x0e\x46ieldReference\x12\x12\n\nfield_path\x18\x02 \x01(\t\x1aV\n\nProjection\x12H\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"I\n\x06\x43ursor\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\x12\x0e\n\x06\x62\x65\x66ore\x18\x02 \x01(\x08\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nQueryProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3'
- ),
- dependencies=[
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- ],
-)
-
-
-_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor(
- name="Operator",
- full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="OPERATOR_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="AND", index=1, number=1, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1161,
- serialized_end=1206,
-)
-_sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR)
-
-_STRUCTUREDQUERY_FIELDFILTER_OPERATOR = _descriptor.EnumDescriptor(
- name="Operator",
- full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="OPERATOR_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="LESS_THAN", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="LESS_THAN_OR_EQUAL",
- index=2,
- number=2,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="GREATER_THAN", index=3, number=3, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="GREATER_THAN_OR_EQUAL",
- index=4,
- number=4,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="EQUAL", index=5, number=5, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="ARRAY_CONTAINS", index=6, number=7, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="IN", index=7, number=8, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="ARRAY_CONTAINS_ANY",
- index=8,
- number=9,
- serialized_options=None,
- type=None,
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1422,
- serialized_end=1605,
-)
-_sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_FIELDFILTER_OPERATOR)
-
-_STRUCTUREDQUERY_UNARYFILTER_OPERATOR = _descriptor.EnumDescriptor(
- name="Operator",
- full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="OPERATOR_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="IS_NAN", index=1, number=2, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="IS_NULL", index=2, number=3, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1774,
- serialized_end=1835,
-)
-_sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_UNARYFILTER_OPERATOR)
-
-_STRUCTUREDQUERY_DIRECTION = _descriptor.EnumDescriptor(
- name="Direction",
- full_name="google.firestore.v1beta1.StructuredQuery.Direction",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="DIRECTION_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="ASCENDING", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="DESCENDING", index=2, number=2, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=2134,
- serialized_end=2203,
-)
-_sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_DIRECTION)
-
-
-_STRUCTUREDQUERY_COLLECTIONSELECTOR = _descriptor.Descriptor(
- name="CollectionSelector",
- full_name="google.firestore.v1beta1.StructuredQuery.CollectionSelector",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="collection_id",
- full_name="google.firestore.v1beta1.StructuredQuery.CollectionSelector.collection_id",
- index=0,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="all_descendants",
- full_name="google.firestore.v1beta1.StructuredQuery.CollectionSelector.all_descendants",
- index=1,
- number=3,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=653,
- serialized_end=721,
-)
-
-_STRUCTUREDQUERY_FILTER = _descriptor.Descriptor(
- name="Filter",
- full_name="google.firestore.v1beta1.StructuredQuery.Filter",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="composite_filter",
- full_name="google.firestore.v1beta1.StructuredQuery.Filter.composite_filter",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="field_filter",
- full_name="google.firestore.v1beta1.StructuredQuery.Filter.field_filter",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="unary_filter",
- full_name="google.firestore.v1beta1.StructuredQuery.Filter.unary_filter",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="filter_type",
- full_name="google.firestore.v1beta1.StructuredQuery.Filter.filter_type",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=724,
- serialized_end=992,
-)
-
-_STRUCTUREDQUERY_COMPOSITEFILTER = _descriptor.Descriptor(
- name="CompositeFilter",
- full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="op",
- full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter.op",
- index=0,
- number=1,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="filters",
- full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter.filters",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=995,
- serialized_end=1206,
-)
-
-_STRUCTUREDQUERY_FIELDFILTER = _descriptor.Descriptor(
- name="FieldFilter",
- full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field",
- full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.field",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="op",
- full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.op",
- index=1,
- number=2,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="value",
- full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.value",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_STRUCTUREDQUERY_FIELDFILTER_OPERATOR],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1209,
- serialized_end=1605,
-)
-
-_STRUCTUREDQUERY_UNARYFILTER = _descriptor.Descriptor(
- name="UnaryFilter",
- full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="op",
- full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.op",
- index=0,
- number=1,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="field",
- full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.field",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_STRUCTUREDQUERY_UNARYFILTER_OPERATOR],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="operand_type",
- full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.operand_type",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=1608,
- serialized_end=1851,
-)
-
-_STRUCTUREDQUERY_ORDER = _descriptor.Descriptor(
- name="Order",
- full_name="google.firestore.v1beta1.StructuredQuery.Order",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field",
- full_name="google.firestore.v1beta1.StructuredQuery.Order.field",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="direction",
- full_name="google.firestore.v1beta1.StructuredQuery.Order.direction",
- index=1,
- number=2,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1854,
- serialized_end=2006,
-)
-
-_STRUCTUREDQUERY_FIELDREFERENCE = _descriptor.Descriptor(
- name="FieldReference",
- full_name="google.firestore.v1beta1.StructuredQuery.FieldReference",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field_path",
- full_name="google.firestore.v1beta1.StructuredQuery.FieldReference.field_path",
- index=0,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2008,
- serialized_end=2044,
-)
-
-_STRUCTUREDQUERY_PROJECTION = _descriptor.Descriptor(
- name="Projection",
- full_name="google.firestore.v1beta1.StructuredQuery.Projection",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="google.firestore.v1beta1.StructuredQuery.Projection.fields",
- index=0,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2046,
- serialized_end=2132,
-)
-
-_STRUCTUREDQUERY = _descriptor.Descriptor(
- name="StructuredQuery",
- full_name="google.firestore.v1beta1.StructuredQuery",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="select",
- full_name="google.firestore.v1beta1.StructuredQuery.select",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="from",
- full_name="google.firestore.v1beta1.StructuredQuery.from",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="where",
- full_name="google.firestore.v1beta1.StructuredQuery.where",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="order_by",
- full_name="google.firestore.v1beta1.StructuredQuery.order_by",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="start_at",
- full_name="google.firestore.v1beta1.StructuredQuery.start_at",
- index=4,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_at",
- full_name="google.firestore.v1beta1.StructuredQuery.end_at",
- index=5,
- number=8,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="offset",
- full_name="google.firestore.v1beta1.StructuredQuery.offset",
- index=6,
- number=6,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="limit",
- full_name="google.firestore.v1beta1.StructuredQuery.limit",
- index=7,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[
- _STRUCTUREDQUERY_COLLECTIONSELECTOR,
- _STRUCTUREDQUERY_FILTER,
- _STRUCTUREDQUERY_COMPOSITEFILTER,
- _STRUCTUREDQUERY_FIELDFILTER,
- _STRUCTUREDQUERY_UNARYFILTER,
- _STRUCTUREDQUERY_ORDER,
- _STRUCTUREDQUERY_FIELDREFERENCE,
- _STRUCTUREDQUERY_PROJECTION,
- ],
- enum_types=[_STRUCTUREDQUERY_DIRECTION],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=194,
- serialized_end=2203,
-)
-
-
-_CURSOR = _descriptor.Descriptor(
- name="Cursor",
- full_name="google.firestore.v1beta1.Cursor",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="values",
- full_name="google.firestore.v1beta1.Cursor.values",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="before",
- full_name="google.firestore.v1beta1.Cursor.before",
- index=1,
- number=2,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2205,
- serialized_end=2278,
-)
-
-_STRUCTUREDQUERY_COLLECTIONSELECTOR.containing_type = _STRUCTUREDQUERY
-_STRUCTUREDQUERY_FILTER.fields_by_name[
- "composite_filter"
-].message_type = _STRUCTUREDQUERY_COMPOSITEFILTER
-_STRUCTUREDQUERY_FILTER.fields_by_name[
- "field_filter"
-].message_type = _STRUCTUREDQUERY_FIELDFILTER
-_STRUCTUREDQUERY_FILTER.fields_by_name[
- "unary_filter"
-].message_type = _STRUCTUREDQUERY_UNARYFILTER
-_STRUCTUREDQUERY_FILTER.containing_type = _STRUCTUREDQUERY
-_STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"].fields.append(
- _STRUCTUREDQUERY_FILTER.fields_by_name["composite_filter"]
-)
-_STRUCTUREDQUERY_FILTER.fields_by_name[
- "composite_filter"
-].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"]
-_STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"].fields.append(
- _STRUCTUREDQUERY_FILTER.fields_by_name["field_filter"]
-)
-_STRUCTUREDQUERY_FILTER.fields_by_name[
- "field_filter"
-].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"]
-_STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"].fields.append(
- _STRUCTUREDQUERY_FILTER.fields_by_name["unary_filter"]
-)
-_STRUCTUREDQUERY_FILTER.fields_by_name[
- "unary_filter"
-].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"]
-_STRUCTUREDQUERY_COMPOSITEFILTER.fields_by_name[
- "op"
-].enum_type = _STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR
-_STRUCTUREDQUERY_COMPOSITEFILTER.fields_by_name[
- "filters"
-].message_type = _STRUCTUREDQUERY_FILTER
-_STRUCTUREDQUERY_COMPOSITEFILTER.containing_type = _STRUCTUREDQUERY
-_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR.containing_type = (
- _STRUCTUREDQUERY_COMPOSITEFILTER
-)
-_STRUCTUREDQUERY_FIELDFILTER.fields_by_name[
- "field"
-].message_type = _STRUCTUREDQUERY_FIELDREFERENCE
-_STRUCTUREDQUERY_FIELDFILTER.fields_by_name[
- "op"
-].enum_type = _STRUCTUREDQUERY_FIELDFILTER_OPERATOR
-_STRUCTUREDQUERY_FIELDFILTER.fields_by_name[
- "value"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
-)
-_STRUCTUREDQUERY_FIELDFILTER.containing_type = _STRUCTUREDQUERY
-_STRUCTUREDQUERY_FIELDFILTER_OPERATOR.containing_type = _STRUCTUREDQUERY_FIELDFILTER
-_STRUCTUREDQUERY_UNARYFILTER.fields_by_name[
- "op"
-].enum_type = _STRUCTUREDQUERY_UNARYFILTER_OPERATOR
-_STRUCTUREDQUERY_UNARYFILTER.fields_by_name[
- "field"
-].message_type = _STRUCTUREDQUERY_FIELDREFERENCE
-_STRUCTUREDQUERY_UNARYFILTER.containing_type = _STRUCTUREDQUERY
-_STRUCTUREDQUERY_UNARYFILTER_OPERATOR.containing_type = _STRUCTUREDQUERY_UNARYFILTER
-_STRUCTUREDQUERY_UNARYFILTER.oneofs_by_name["operand_type"].fields.append(
- _STRUCTUREDQUERY_UNARYFILTER.fields_by_name["field"]
-)
-_STRUCTUREDQUERY_UNARYFILTER.fields_by_name[
- "field"
-].containing_oneof = _STRUCTUREDQUERY_UNARYFILTER.oneofs_by_name["operand_type"]
-_STRUCTUREDQUERY_ORDER.fields_by_name[
- "field"
-].message_type = _STRUCTUREDQUERY_FIELDREFERENCE
-_STRUCTUREDQUERY_ORDER.fields_by_name[
- "direction"
-].enum_type = _STRUCTUREDQUERY_DIRECTION
-_STRUCTUREDQUERY_ORDER.containing_type = _STRUCTUREDQUERY
-_STRUCTUREDQUERY_FIELDREFERENCE.containing_type = _STRUCTUREDQUERY
-_STRUCTUREDQUERY_PROJECTION.fields_by_name[
- "fields"
-].message_type = _STRUCTUREDQUERY_FIELDREFERENCE
-_STRUCTUREDQUERY_PROJECTION.containing_type = _STRUCTUREDQUERY
-_STRUCTUREDQUERY.fields_by_name["select"].message_type = _STRUCTUREDQUERY_PROJECTION
-_STRUCTUREDQUERY.fields_by_name[
- "from"
-].message_type = _STRUCTUREDQUERY_COLLECTIONSELECTOR
-_STRUCTUREDQUERY.fields_by_name["where"].message_type = _STRUCTUREDQUERY_FILTER
-_STRUCTUREDQUERY.fields_by_name["order_by"].message_type = _STRUCTUREDQUERY_ORDER
-_STRUCTUREDQUERY.fields_by_name["start_at"].message_type = _CURSOR
-_STRUCTUREDQUERY.fields_by_name["end_at"].message_type = _CURSOR
-_STRUCTUREDQUERY.fields_by_name[
- "limit"
-].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
-_STRUCTUREDQUERY_DIRECTION.containing_type = _STRUCTUREDQUERY
-_CURSOR.fields_by_name[
- "values"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
-)
-DESCRIPTOR.message_types_by_name["StructuredQuery"] = _STRUCTUREDQUERY
-DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-StructuredQuery = _reflection.GeneratedProtocolMessageType(
- "StructuredQuery",
- (_message.Message,),
- dict(
- CollectionSelector=_reflection.GeneratedProtocolMessageType(
- "CollectionSelector",
- (_message.Message,),
- dict(
- DESCRIPTOR=_STRUCTUREDQUERY_COLLECTIONSELECTOR,
- __module__="google.cloud.firestore_v1beta1.proto.query_pb2",
- __doc__="""A selection of a collection, such as ``messages as m1``.
-
-
- Attributes:
- collection_id:
- The collection ID. When set, selects only collections with
- this ID.
- all_descendants:
- When false, selects only collections that are immediate
- children of the ``parent`` specified in the containing
- ``RunQueryRequest``. When true, selects all descendant
- collections.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.CollectionSelector)
- ),
- ),
- Filter=_reflection.GeneratedProtocolMessageType(
- "Filter",
- (_message.Message,),
- dict(
- DESCRIPTOR=_STRUCTUREDQUERY_FILTER,
- __module__="google.cloud.firestore_v1beta1.proto.query_pb2",
- __doc__="""A filter.
-
-
- Attributes:
- filter_type:
- The type of filter.
- composite_filter:
- A composite filter.
- field_filter:
- A filter on a document field.
- unary_filter:
- A filter that takes exactly one argument.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Filter)
- ),
- ),
- CompositeFilter=_reflection.GeneratedProtocolMessageType(
- "CompositeFilter",
- (_message.Message,),
- dict(
- DESCRIPTOR=_STRUCTUREDQUERY_COMPOSITEFILTER,
- __module__="google.cloud.firestore_v1beta1.proto.query_pb2",
- __doc__="""A filter that merges multiple other filters using the
- given operator.
-
-
- Attributes:
- op:
- The operator for combining multiple filters.
- filters:
- The list of filters to combine. Must contain at least one
- filter.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.CompositeFilter)
- ),
- ),
- FieldFilter=_reflection.GeneratedProtocolMessageType(
- "FieldFilter",
- (_message.Message,),
- dict(
- DESCRIPTOR=_STRUCTUREDQUERY_FIELDFILTER,
- __module__="google.cloud.firestore_v1beta1.proto.query_pb2",
- __doc__="""A filter on a specific field.
-
-
- Attributes:
- field:
- The field to filter by.
- op:
- The operator to filter by.
- value:
- The value to compare to.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldFilter)
- ),
- ),
- UnaryFilter=_reflection.GeneratedProtocolMessageType(
- "UnaryFilter",
- (_message.Message,),
- dict(
- DESCRIPTOR=_STRUCTUREDQUERY_UNARYFILTER,
- __module__="google.cloud.firestore_v1beta1.proto.query_pb2",
- __doc__="""A filter with a single operand.
-
-
- Attributes:
- op:
- The unary operator to apply.
- operand_type:
- The argument to the filter.
- field:
- The field to which to apply the operator.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.UnaryFilter)
- ),
- ),
- Order=_reflection.GeneratedProtocolMessageType(
- "Order",
- (_message.Message,),
- dict(
- DESCRIPTOR=_STRUCTUREDQUERY_ORDER,
- __module__="google.cloud.firestore_v1beta1.proto.query_pb2",
- __doc__="""An order on a field.
-
-
- Attributes:
- field:
- The field to order by.
- direction:
- The direction to order by. Defaults to ``ASCENDING``.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Order)
- ),
- ),
- FieldReference=_reflection.GeneratedProtocolMessageType(
- "FieldReference",
- (_message.Message,),
- dict(
- DESCRIPTOR=_STRUCTUREDQUERY_FIELDREFERENCE,
- __module__="google.cloud.firestore_v1beta1.proto.query_pb2",
- __doc__="""A reference to a field, such as
- ``max(messages.time) as max_time``.
-
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldReference)
- ),
- ),
- Projection=_reflection.GeneratedProtocolMessageType(
- "Projection",
- (_message.Message,),
- dict(
- DESCRIPTOR=_STRUCTUREDQUERY_PROJECTION,
- __module__="google.cloud.firestore_v1beta1.proto.query_pb2",
- __doc__="""The projection of document's fields to return.
-
-
- Attributes:
- fields:
- The fields to return. If empty, all fields are returned. To
- only return the name of the document, use ``['__name__']``.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Projection)
- ),
- ),
- DESCRIPTOR=_STRUCTUREDQUERY,
- __module__="google.cloud.firestore_v1beta1.proto.query_pb2",
- __doc__="""A Firestore query.
-
-
- Attributes:
- select:
- The projection to return.
- from:
- The collections to query.
- where:
- The filter to apply.
- order_by:
- The order to apply to the query results. Firestore guarantees
- a stable ordering through the following rules: - Any field
- required to appear in ``order_by``, that is not already
- specified in ``order_by``, is appended to the order in field
- name order by default. - If an order on ``__name__`` is
- not specified, it is appended by default. Fields are
- appended with the same sort direction as the last order
- specified, or 'ASCENDING' if no order was specified. For
- example: - ``SELECT * FROM Foo ORDER BY A`` becomes
- ``SELECT * FROM Foo ORDER BY A, __name__`` - ``SELECT * FROM
- Foo ORDER BY A DESC`` becomes ``SELECT * FROM Foo ORDER BY
- A DESC, __name__ DESC`` - ``SELECT * FROM Foo WHERE A > 1``
- becomes ``SELECT * FROM Foo WHERE A > 1 ORDER BY A,
- __name__``
- start_at:
- A starting point for the query results.
- end_at:
- A end point for the query results.
- offset:
- The number of results to skip. Applies before limit, but
- after all other constraints. Must be >= 0 if specified.
- limit:
- The maximum number of results to return. Applies after all
- other constraints. Must be >= 0 if specified.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery)
- ),
-)
-_sym_db.RegisterMessage(StructuredQuery)
-_sym_db.RegisterMessage(StructuredQuery.CollectionSelector)
-_sym_db.RegisterMessage(StructuredQuery.Filter)
-_sym_db.RegisterMessage(StructuredQuery.CompositeFilter)
-_sym_db.RegisterMessage(StructuredQuery.FieldFilter)
-_sym_db.RegisterMessage(StructuredQuery.UnaryFilter)
-_sym_db.RegisterMessage(StructuredQuery.Order)
-_sym_db.RegisterMessage(StructuredQuery.FieldReference)
-_sym_db.RegisterMessage(StructuredQuery.Projection)
-
-Cursor = _reflection.GeneratedProtocolMessageType(
- "Cursor",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CURSOR,
- __module__="google.cloud.firestore_v1beta1.proto.query_pb2",
- __doc__="""A position in a query result set.
-
-
- Attributes:
- values:
- The values that represent a position, in the order they appear
- in the order by clause of a query. Can contain fewer values
- than specified in the order by clause.
- before:
- If the position is just before or just after the given values,
- relative to the sort order defined by the query.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Cursor)
- ),
-)
-_sym_db.RegisterMessage(Cursor)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py b/google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_v1beta1/proto/test_v1beta1_pb2.py b/google/cloud/firestore_v1beta1/proto/test_v1beta1_pb2.py
deleted file mode 100644
index 18dc587068..0000000000
--- a/google/cloud/firestore_v1beta1/proto/test_v1beta1_pb2.py
+++ /dev/null
@@ -1,2190 +0,0 @@
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: test_v1beta1.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.cloud.firestore_v1beta1.proto import (
- common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2,
-)
-from google.cloud.firestore_v1beta1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
-)
-from google.cloud.firestore_v1beta1.proto import (
- firestore_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2,
-)
-from google.cloud.firestore_v1beta1.proto import (
- query_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2,
-)
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="test_v1beta1.proto",
- package="tests.v1beta1",
- syntax="proto3",
- serialized_pb=_b(
- '\n\x12test_v1beta1.proto\x12\rtests.v1beta1\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x34google/cloud/firestore_v1beta1/proto/firestore.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"/\n\tTestSuite\x12"\n\x05tests\x18\x01 \x03(\x0b\x32\x13.tests.v1beta1.Test"\x88\x03\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12%\n\x03get\x18\x02 \x01(\x0b\x32\x16.tests.v1beta1.GetTestH\x00\x12+\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x19.tests.v1beta1.CreateTestH\x00\x12%\n\x03set\x18\x04 \x01(\x0b\x32\x16.tests.v1beta1.SetTestH\x00\x12+\n\x06update\x18\x05 \x01(\x0b\x32\x19.tests.v1beta1.UpdateTestH\x00\x12\x36\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x1e.tests.v1beta1.UpdatePathsTestH\x00\x12+\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x19.tests.v1beta1.DeleteTestH\x00\x12)\n\x05query\x18\x08 \x01(\x0b\x32\x18.tests.v1beta1.QueryTestH\x00\x12+\n\x06listen\x18\t \x01(\x0b\x32\x19.tests.v1beta1.ListenTestH\x00\x42\x06\n\x04test"^\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12=\n\x07request\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.GetDocumentRequest"\x81\x01\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xa8\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12(\n\x06option\x18\x02 \x01(\x0b\x32\x18.tests.v1beta1.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xbf\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 
\x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xf5\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12-\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x18.tests.v1beta1.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x38\n\x07request\x18\x05 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xac\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"B\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12(\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x18.tests.v1beta1.FieldPath"\x92\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12&\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\x15.tests.v1beta1.Clause\x12\x38\n\x05query\x18\x03 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xe0\x02\n\x06\x43lause\x12\'\n\x06select\x18\x01 \x01(\x0b\x32\x15.tests.v1beta1.SelectH\x00\x12%\n\x05where\x18\x02 \x01(\x0b\x32\x14.tests.v1beta1.WhereH\x00\x12*\n\x08order_by\x18\x03 \x01(\x0b\x32\x16.tests.v1beta1.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12)\n\x08start_at\x18\x06 \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x12,\n\x0bstart_after\x18\x07 \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x12\'\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x12+\n\nend_before\x18\t \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x42\x08\n\x06\x63lause"2\n\x06Select\x12(\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x18.tests.v1beta1.FieldPath"O\n\x05Where\x12&\n\x04path\x18\x01 \x01(\x0b\x32\x18.tests.v1beta1.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 
\x01(\t"D\n\x07OrderBy\x12&\n\x04path\x18\x01 \x01(\x0b\x32\x18.tests.v1beta1.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"O\n\x06\x43ursor\x12\x30\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32\x1a.tests.v1beta1.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"\x87\x01\n\nListenTest\x12;\n\tresponses\x18\x01 \x03(\x0b\x32(.google.firestore.v1beta1.ListenResponse\x12*\n\tsnapshots\x18\x02 \x03(\x0b\x32\x17.tests.v1beta1.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\x96\x01\n\x08Snapshot\x12\x30\n\x04\x64ocs\x18\x01 \x03(\x0b\x32".google.firestore.v1beta1.Document\x12)\n\x07\x63hanges\x18\x02 \x03(\x0b\x32\x18.tests.v1beta1.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xd3\x01\n\tDocChange\x12+\n\x04kind\x18\x01 \x01(\x0e\x32\x1d.tests.v1beta1.DocChange.Kind\x12/\n\x03\x64oc\x18\x02 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42x\n&com.google.cloud.firestore.conformance\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3'
- ),
- dependencies=[
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- ],
-)
-
-
-_DOCCHANGE_KIND = _descriptor.EnumDescriptor(
- name="Kind",
- full_name="tests.v1beta1.DocChange.Kind",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="KIND_UNSPECIFIED", index=0, number=0, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="ADDED", index=1, number=1, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="REMOVED", index=2, number=2, options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="MODIFIED", index=3, number=3, options=None, type=None
- ),
- ],
- containing_type=None,
- options=None,
- serialized_start=3107,
- serialized_end=3173,
-)
-_sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND)
-
-
-_TESTSUITE = _descriptor.Descriptor(
- name="TestSuite",
- full_name="tests.v1beta1.TestSuite",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="tests",
- full_name="tests.v1beta1.TestSuite.tests",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=278,
- serialized_end=325,
-)
-
-
-_TEST = _descriptor.Descriptor(
- name="Test",
- full_name="tests.v1beta1.Test",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="description",
- full_name="tests.v1beta1.Test.description",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="get",
- full_name="tests.v1beta1.Test.get",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="create",
- full_name="tests.v1beta1.Test.create",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="set",
- full_name="tests.v1beta1.Test.set",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update",
- full_name="tests.v1beta1.Test.update",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update_paths",
- full_name="tests.v1beta1.Test.update_paths",
- index=5,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="delete",
- full_name="tests.v1beta1.Test.delete",
- index=6,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="query",
- full_name="tests.v1beta1.Test.query",
- index=7,
- number=8,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="listen",
- full_name="tests.v1beta1.Test.listen",
- index=8,
- number=9,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="test",
- full_name="tests.v1beta1.Test.test",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=328,
- serialized_end=720,
-)
-
-
-_GETTEST = _descriptor.Descriptor(
- name="GetTest",
- full_name="tests.v1beta1.GetTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1beta1.GetTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1beta1.GetTest.request",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=722,
- serialized_end=816,
-)
-
-
-_CREATETEST = _descriptor.Descriptor(
- name="CreateTest",
- full_name="tests.v1beta1.CreateTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1beta1.CreateTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="tests.v1beta1.CreateTest.json_data",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1beta1.CreateTest.request",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1beta1.CreateTest.is_error",
- index=3,
- number=4,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=819,
- serialized_end=948,
-)
-
-
-_SETTEST = _descriptor.Descriptor(
- name="SetTest",
- full_name="tests.v1beta1.SetTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1beta1.SetTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="option",
- full_name="tests.v1beta1.SetTest.option",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="tests.v1beta1.SetTest.json_data",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1beta1.SetTest.request",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1beta1.SetTest.is_error",
- index=4,
- number=5,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=951,
- serialized_end=1119,
-)
-
-
-_UPDATETEST = _descriptor.Descriptor(
- name="UpdateTest",
- full_name="tests.v1beta1.UpdateTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1beta1.UpdateTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="precondition",
- full_name="tests.v1beta1.UpdateTest.precondition",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="tests.v1beta1.UpdateTest.json_data",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1beta1.UpdateTest.request",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1beta1.UpdateTest.is_error",
- index=4,
- number=5,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1122,
- serialized_end=1313,
-)
-
-
-_UPDATEPATHSTEST = _descriptor.Descriptor(
- name="UpdatePathsTest",
- full_name="tests.v1beta1.UpdatePathsTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1beta1.UpdatePathsTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="precondition",
- full_name="tests.v1beta1.UpdatePathsTest.precondition",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="field_paths",
- full_name="tests.v1beta1.UpdatePathsTest.field_paths",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_values",
- full_name="tests.v1beta1.UpdatePathsTest.json_values",
- index=3,
- number=4,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1beta1.UpdatePathsTest.request",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1beta1.UpdatePathsTest.is_error",
- index=5,
- number=6,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1316,
- serialized_end=1561,
-)
-
-
-_DELETETEST = _descriptor.Descriptor(
- name="DeleteTest",
- full_name="tests.v1beta1.DeleteTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_ref_path",
- full_name="tests.v1beta1.DeleteTest.doc_ref_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="precondition",
- full_name="tests.v1beta1.DeleteTest.precondition",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="request",
- full_name="tests.v1beta1.DeleteTest.request",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1beta1.DeleteTest.is_error",
- index=3,
- number=4,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1564,
- serialized_end=1736,
-)
-
-
-_SETOPTION = _descriptor.Descriptor(
- name="SetOption",
- full_name="tests.v1beta1.SetOption",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="all",
- full_name="tests.v1beta1.SetOption.all",
- index=0,
- number=1,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="tests.v1beta1.SetOption.fields",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1738,
- serialized_end=1804,
-)
-
-
-_QUERYTEST = _descriptor.Descriptor(
- name="QueryTest",
- full_name="tests.v1beta1.QueryTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="coll_path",
- full_name="tests.v1beta1.QueryTest.coll_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="clauses",
- full_name="tests.v1beta1.QueryTest.clauses",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="query",
- full_name="tests.v1beta1.QueryTest.query",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1beta1.QueryTest.is_error",
- index=3,
- number=4,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1807,
- serialized_end=1953,
-)
-
-
-_CLAUSE = _descriptor.Descriptor(
- name="Clause",
- full_name="tests.v1beta1.Clause",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="select",
- full_name="tests.v1beta1.Clause.select",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="where",
- full_name="tests.v1beta1.Clause.where",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="order_by",
- full_name="tests.v1beta1.Clause.order_by",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="offset",
- full_name="tests.v1beta1.Clause.offset",
- index=3,
- number=4,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="limit",
- full_name="tests.v1beta1.Clause.limit",
- index=4,
- number=5,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="start_at",
- full_name="tests.v1beta1.Clause.start_at",
- index=5,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="start_after",
- full_name="tests.v1beta1.Clause.start_after",
- index=6,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_at",
- full_name="tests.v1beta1.Clause.end_at",
- index=7,
- number=8,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="end_before",
- full_name="tests.v1beta1.Clause.end_before",
- index=8,
- number=9,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="clause",
- full_name="tests.v1beta1.Clause.clause",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=1956,
- serialized_end=2308,
-)
-
-
-_SELECT = _descriptor.Descriptor(
- name="Select",
- full_name="tests.v1beta1.Select",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="fields",
- full_name="tests.v1beta1.Select.fields",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2310,
- serialized_end=2360,
-)
-
-
-_WHERE = _descriptor.Descriptor(
- name="Where",
- full_name="tests.v1beta1.Where",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="path",
- full_name="tests.v1beta1.Where.path",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="op",
- full_name="tests.v1beta1.Where.op",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_value",
- full_name="tests.v1beta1.Where.json_value",
- index=2,
- number=3,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2362,
- serialized_end=2441,
-)
-
-
-_ORDERBY = _descriptor.Descriptor(
- name="OrderBy",
- full_name="tests.v1beta1.OrderBy",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="path",
- full_name="tests.v1beta1.OrderBy.path",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="direction",
- full_name="tests.v1beta1.OrderBy.direction",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2443,
- serialized_end=2511,
-)
-
-
-_CURSOR = _descriptor.Descriptor(
- name="Cursor",
- full_name="tests.v1beta1.Cursor",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="doc_snapshot",
- full_name="tests.v1beta1.Cursor.doc_snapshot",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_values",
- full_name="tests.v1beta1.Cursor.json_values",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2513,
- serialized_end=2592,
-)
-
-
-_DOCSNAPSHOT = _descriptor.Descriptor(
- name="DocSnapshot",
- full_name="tests.v1beta1.DocSnapshot",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="path",
- full_name="tests.v1beta1.DocSnapshot.path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="json_data",
- full_name="tests.v1beta1.DocSnapshot.json_data",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2594,
- serialized_end=2640,
-)
-
-
-_FIELDPATH = _descriptor.Descriptor(
- name="FieldPath",
- full_name="tests.v1beta1.FieldPath",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field",
- full_name="tests.v1beta1.FieldPath.field",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- )
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2642,
- serialized_end=2668,
-)
-
-
-_LISTENTEST = _descriptor.Descriptor(
- name="ListenTest",
- full_name="tests.v1beta1.ListenTest",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="responses",
- full_name="tests.v1beta1.ListenTest.responses",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="snapshots",
- full_name="tests.v1beta1.ListenTest.snapshots",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="is_error",
- full_name="tests.v1beta1.ListenTest.is_error",
- index=2,
- number=3,
- type=8,
- cpp_type=7,
- label=1,
- has_default_value=False,
- default_value=False,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2671,
- serialized_end=2806,
-)
-
-
-_SNAPSHOT = _descriptor.Descriptor(
- name="Snapshot",
- full_name="tests.v1beta1.Snapshot",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="docs",
- full_name="tests.v1beta1.Snapshot.docs",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="changes",
- full_name="tests.v1beta1.Snapshot.changes",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="tests.v1beta1.Snapshot.read_time",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2809,
- serialized_end=2959,
-)
-
-
-_DOCCHANGE = _descriptor.Descriptor(
- name="DocChange",
- full_name="tests.v1beta1.DocChange",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="kind",
- full_name="tests.v1beta1.DocChange.kind",
- index=0,
- number=1,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="doc",
- full_name="tests.v1beta1.DocChange.doc",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="old_index",
- full_name="tests.v1beta1.DocChange.old_index",
- index=2,
- number=3,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="new_index",
- full_name="tests.v1beta1.DocChange.new_index",
- index=3,
- number=4,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_DOCCHANGE_KIND],
- options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=2962,
- serialized_end=3173,
-)
-
-_TESTSUITE.fields_by_name["tests"].message_type = _TEST
-_TEST.fields_by_name["get"].message_type = _GETTEST
-_TEST.fields_by_name["create"].message_type = _CREATETEST
-_TEST.fields_by_name["set"].message_type = _SETTEST
-_TEST.fields_by_name["update"].message_type = _UPDATETEST
-_TEST.fields_by_name["update_paths"].message_type = _UPDATEPATHSTEST
-_TEST.fields_by_name["delete"].message_type = _DELETETEST
-_TEST.fields_by_name["query"].message_type = _QUERYTEST
-_TEST.fields_by_name["listen"].message_type = _LISTENTEST
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["get"])
-_TEST.fields_by_name["get"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["create"])
-_TEST.fields_by_name["create"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["set"])
-_TEST.fields_by_name["set"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update"])
-_TEST.fields_by_name["update"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update_paths"])
-_TEST.fields_by_name["update_paths"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["delete"])
-_TEST.fields_by_name["delete"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["query"])
-_TEST.fields_by_name["query"].containing_oneof = _TEST.oneofs_by_name["test"]
-_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["listen"])
-_TEST.fields_by_name["listen"].containing_oneof = _TEST.oneofs_by_name["test"]
-_GETTEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST
-)
-_CREATETEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_SETTEST.fields_by_name["option"].message_type = _SETOPTION
-_SETTEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_UPDATETEST.fields_by_name[
- "precondition"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_UPDATETEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_UPDATEPATHSTEST.fields_by_name[
- "precondition"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_UPDATEPATHSTEST.fields_by_name["field_paths"].message_type = _FIELDPATH
-_UPDATEPATHSTEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_DELETETEST.fields_by_name[
- "precondition"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_DELETETEST.fields_by_name[
- "request"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST
-)
-_SETOPTION.fields_by_name["fields"].message_type = _FIELDPATH
-_QUERYTEST.fields_by_name["clauses"].message_type = _CLAUSE
-_QUERYTEST.fields_by_name[
- "query"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
-)
-_CLAUSE.fields_by_name["select"].message_type = _SELECT
-_CLAUSE.fields_by_name["where"].message_type = _WHERE
-_CLAUSE.fields_by_name["order_by"].message_type = _ORDERBY
-_CLAUSE.fields_by_name["start_at"].message_type = _CURSOR
-_CLAUSE.fields_by_name["start_after"].message_type = _CURSOR
-_CLAUSE.fields_by_name["end_at"].message_type = _CURSOR
-_CLAUSE.fields_by_name["end_before"].message_type = _CURSOR
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["select"])
-_CLAUSE.fields_by_name["select"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["where"])
-_CLAUSE.fields_by_name["where"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["order_by"])
-_CLAUSE.fields_by_name["order_by"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["offset"])
-_CLAUSE.fields_by_name["offset"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["limit"])
-_CLAUSE.fields_by_name["limit"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_at"])
-_CLAUSE.fields_by_name["start_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_after"])
-_CLAUSE.fields_by_name["start_after"].containing_oneof = _CLAUSE.oneofs_by_name[
- "clause"
-]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_at"])
-_CLAUSE.fields_by_name["end_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_before"])
-_CLAUSE.fields_by_name["end_before"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
-_SELECT.fields_by_name["fields"].message_type = _FIELDPATH
-_WHERE.fields_by_name["path"].message_type = _FIELDPATH
-_ORDERBY.fields_by_name["path"].message_type = _FIELDPATH
-_CURSOR.fields_by_name["doc_snapshot"].message_type = _DOCSNAPSHOT
-_LISTENTEST.fields_by_name[
- "responses"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._LISTENRESPONSE
-)
-_LISTENTEST.fields_by_name["snapshots"].message_type = _SNAPSHOT
-_SNAPSHOT.fields_by_name[
- "docs"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_SNAPSHOT.fields_by_name["changes"].message_type = _DOCCHANGE
-_SNAPSHOT.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_DOCCHANGE.fields_by_name["kind"].enum_type = _DOCCHANGE_KIND
-_DOCCHANGE.fields_by_name[
- "doc"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_DOCCHANGE_KIND.containing_type = _DOCCHANGE
-DESCRIPTOR.message_types_by_name["TestSuite"] = _TESTSUITE
-DESCRIPTOR.message_types_by_name["Test"] = _TEST
-DESCRIPTOR.message_types_by_name["GetTest"] = _GETTEST
-DESCRIPTOR.message_types_by_name["CreateTest"] = _CREATETEST
-DESCRIPTOR.message_types_by_name["SetTest"] = _SETTEST
-DESCRIPTOR.message_types_by_name["UpdateTest"] = _UPDATETEST
-DESCRIPTOR.message_types_by_name["UpdatePathsTest"] = _UPDATEPATHSTEST
-DESCRIPTOR.message_types_by_name["DeleteTest"] = _DELETETEST
-DESCRIPTOR.message_types_by_name["SetOption"] = _SETOPTION
-DESCRIPTOR.message_types_by_name["QueryTest"] = _QUERYTEST
-DESCRIPTOR.message_types_by_name["Clause"] = _CLAUSE
-DESCRIPTOR.message_types_by_name["Select"] = _SELECT
-DESCRIPTOR.message_types_by_name["Where"] = _WHERE
-DESCRIPTOR.message_types_by_name["OrderBy"] = _ORDERBY
-DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR
-DESCRIPTOR.message_types_by_name["DocSnapshot"] = _DOCSNAPSHOT
-DESCRIPTOR.message_types_by_name["FieldPath"] = _FIELDPATH
-DESCRIPTOR.message_types_by_name["ListenTest"] = _LISTENTEST
-DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT
-DESCRIPTOR.message_types_by_name["DocChange"] = _DOCCHANGE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-TestSuite = _reflection.GeneratedProtocolMessageType(
- "TestSuite",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TESTSUITE,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.TestSuite)
- ),
-)
-_sym_db.RegisterMessage(TestSuite)
-
-Test = _reflection.GeneratedProtocolMessageType(
- "Test",
- (_message.Message,),
- dict(
- DESCRIPTOR=_TEST,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.Test)
- ),
-)
-_sym_db.RegisterMessage(Test)
-
-GetTest = _reflection.GeneratedProtocolMessageType(
- "GetTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GETTEST,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.GetTest)
- ),
-)
-_sym_db.RegisterMessage(GetTest)
-
-CreateTest = _reflection.GeneratedProtocolMessageType(
- "CreateTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CREATETEST,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.CreateTest)
- ),
-)
-_sym_db.RegisterMessage(CreateTest)
-
-SetTest = _reflection.GeneratedProtocolMessageType(
- "SetTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SETTEST,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.SetTest)
- ),
-)
-_sym_db.RegisterMessage(SetTest)
-
-UpdateTest = _reflection.GeneratedProtocolMessageType(
- "UpdateTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPDATETEST,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.UpdateTest)
- ),
-)
-_sym_db.RegisterMessage(UpdateTest)
-
-UpdatePathsTest = _reflection.GeneratedProtocolMessageType(
- "UpdatePathsTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPDATEPATHSTEST,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.UpdatePathsTest)
- ),
-)
-_sym_db.RegisterMessage(UpdatePathsTest)
-
-DeleteTest = _reflection.GeneratedProtocolMessageType(
- "DeleteTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DELETETEST,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.DeleteTest)
- ),
-)
-_sym_db.RegisterMessage(DeleteTest)
-
-SetOption = _reflection.GeneratedProtocolMessageType(
- "SetOption",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SETOPTION,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.SetOption)
- ),
-)
-_sym_db.RegisterMessage(SetOption)
-
-QueryTest = _reflection.GeneratedProtocolMessageType(
- "QueryTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_QUERYTEST,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.QueryTest)
- ),
-)
-_sym_db.RegisterMessage(QueryTest)
-
-Clause = _reflection.GeneratedProtocolMessageType(
- "Clause",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CLAUSE,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.Clause)
- ),
-)
-_sym_db.RegisterMessage(Clause)
-
-Select = _reflection.GeneratedProtocolMessageType(
- "Select",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SELECT,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.Select)
- ),
-)
-_sym_db.RegisterMessage(Select)
-
-Where = _reflection.GeneratedProtocolMessageType(
- "Where",
- (_message.Message,),
- dict(
- DESCRIPTOR=_WHERE,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.Where)
- ),
-)
-_sym_db.RegisterMessage(Where)
-
-OrderBy = _reflection.GeneratedProtocolMessageType(
- "OrderBy",
- (_message.Message,),
- dict(
- DESCRIPTOR=_ORDERBY,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.OrderBy)
- ),
-)
-_sym_db.RegisterMessage(OrderBy)
-
-Cursor = _reflection.GeneratedProtocolMessageType(
- "Cursor",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CURSOR,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.Cursor)
- ),
-)
-_sym_db.RegisterMessage(Cursor)
-
-DocSnapshot = _reflection.GeneratedProtocolMessageType(
- "DocSnapshot",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCSNAPSHOT,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.DocSnapshot)
- ),
-)
-_sym_db.RegisterMessage(DocSnapshot)
-
-FieldPath = _reflection.GeneratedProtocolMessageType(
- "FieldPath",
- (_message.Message,),
- dict(
- DESCRIPTOR=_FIELDPATH,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.FieldPath)
- ),
-)
-_sym_db.RegisterMessage(FieldPath)
-
-ListenTest = _reflection.GeneratedProtocolMessageType(
- "ListenTest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTENTEST,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.ListenTest)
- ),
-)
-_sym_db.RegisterMessage(ListenTest)
-
-Snapshot = _reflection.GeneratedProtocolMessageType(
- "Snapshot",
- (_message.Message,),
- dict(
- DESCRIPTOR=_SNAPSHOT,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.Snapshot)
- ),
-)
-_sym_db.RegisterMessage(Snapshot)
-
-DocChange = _reflection.GeneratedProtocolMessageType(
- "DocChange",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCCHANGE,
- __module__="test_v1beta1_pb2"
- # @@protoc_insertion_point(class_scope:tests.v1beta1.DocChange)
- ),
-)
-_sym_db.RegisterMessage(DocChange)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(
- descriptor_pb2.FileOptions(),
- _b(
- '\n&com.google.cloud.firestore.conformance\252\002"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance'
- ),
-)
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1beta1/proto/write.proto b/google/cloud/firestore_v1beta1/proto/write.proto
deleted file mode 100644
index c02a2a8a1a..0000000000
--- a/google/cloud/firestore_v1beta1/proto/write.proto
+++ /dev/null
@@ -1,254 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.firestore.v1beta1;
-
-import "google/firestore/v1beta1/common.proto";
-import "google/firestore/v1beta1/document.proto";
-import "google/protobuf/timestamp.proto";
-import "google/api/annotations.proto";
-
-option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
-option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
-option java_multiple_files = true;
-option java_outer_classname = "WriteProto";
-option java_package = "com.google.firestore.v1beta1";
-option objc_class_prefix = "GCFS";
-option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
-
-// A write on a document.
-message Write {
- // The operation to execute.
- oneof operation {
- // A document to write.
- Document update = 1;
-
- // A document name to delete. In the format:
- // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
- string delete = 2;
-
- // Applies a transformation to a document.
- // At most one `transform` per document is allowed in a given request.
- // An `update` cannot follow a `transform` on the same document in a given
- // request.
- DocumentTransform transform = 6;
- }
-
- // The fields to update in this write.
- //
- // This field can be set only when the operation is `update`.
- // If the mask is not set for an `update` and the document exists, any
- // existing data will be overwritten.
- // If the mask is set and the document on the server has fields not covered by
- // the mask, they are left unchanged.
- // Fields referenced in the mask, but not present in the input document, are
- // deleted from the document on the server.
- // The field paths in this mask must not contain a reserved field name.
- DocumentMask update_mask = 3;
-
- // An optional precondition on the document.
- //
- // The write will fail if this is set and not met by the target document.
- Precondition current_document = 4;
-}
-
-// A transformation of a document.
-message DocumentTransform {
- // A transformation of a field of the document.
- message FieldTransform {
- // A value that is calculated by the server.
- enum ServerValue {
- // Unspecified. This value must not be used.
- SERVER_VALUE_UNSPECIFIED = 0;
-
- // The time at which the server processed the request, with millisecond
- // precision.
- REQUEST_TIME = 1;
- }
-
- // The path of the field. See [Document.fields][google.firestore.v1beta1.Document.fields] for the field path syntax
- // reference.
- string field_path = 1;
-
- // The transformation to apply on the field.
- oneof transform_type {
- // Sets the field to the given server value.
- ServerValue set_to_server_value = 2;
-
- // Adds the given value to the field's current value.
- //
- // This must be an integer or a double value.
- // If the field is not an integer or double, or if the field does not yet
- // exist, the transformation will set the field to the given value.
- // If either of the given value or the current field value are doubles,
- // both values will be interpreted as doubles. Double arithmetic and
- // representation of double values follow IEEE 754 semantics.
- // If there is positive/negative integer overflow, the field is resolved
- // to the largest magnitude positive/negative integer.
- Value increment = 3;
-
- // Sets the field to the maximum of its current value and the given value.
- //
- // This must be an integer or a double value.
- // If the field is not an integer or double, or if the field does not yet
- // exist, the transformation will set the field to the given value.
- // If a maximum operation is applied where the field and the input value
- // are of mixed types (that is - one is an integer and one is a double)
- // the field takes on the type of the larger operand. If the operands are
- // equivalent (e.g. 3 and 3.0), the field does not change.
- // 0, 0.0, and -0.0 are all zero. The maximum of a zero stored value and
- // zero input value is always the stored value.
- // The maximum of any numeric value x and NaN is NaN.
- Value maximum = 4;
-
- // Sets the field to the minimum of its current value and the given value.
- //
- // This must be an integer or a double value.
- // If the field is not an integer or double, or if the field does not yet
- // exist, the transformation will set the field to the input value.
- // If a minimum operation is applied where the field and the input value
- // are of mixed types (that is - one is an integer and one is a double)
- // the field takes on the type of the smaller operand. If the operands are
- // equivalent (e.g. 3 and 3.0), the field does not change.
- // 0, 0.0, and -0.0 are all zero. The minimum of a zero stored value and
- // zero input value is always the stored value.
- // The minimum of any numeric value x and NaN is NaN.
- Value minimum = 5;
-
- // Append the given elements in order if they are not already present in
- // the current field value.
- // If the field is not an array, or if the field does not yet exist, it is
- // first set to the empty array.
- //
- // Equivalent numbers of different types (e.g. 3L and 3.0) are
- // considered equal when checking if a value is missing.
- // NaN is equal to NaN, and Null is equal to Null.
- // If the input contains multiple equivalent values, only the first will
- // be considered.
- //
- // The corresponding transform_result will be the null value.
- ArrayValue append_missing_elements = 6;
-
- // Remove all of the given elements from the array in the field.
- // If the field is not an array, or if the field does not yet exist, it is
- // set to the empty array.
- //
- // Equivalent numbers of the different types (e.g. 3L and 3.0) are
- // considered equal when deciding whether an element should be removed.
- // NaN is equal to NaN, and Null is equal to Null.
- // This will remove all equivalent values if there are duplicates.
- //
- // The corresponding transform_result will be the null value.
- ArrayValue remove_all_from_array = 7;
- }
- }
-
- // The name of the document to transform.
- string document = 1;
-
- // The list of transformations to apply to the fields of the document, in
- // order.
- // This must not be empty.
- repeated FieldTransform field_transforms = 2;
-}
-
-// The result of applying a write.
-message WriteResult {
- // The last update time of the document after applying the write. Not set
- // after a `delete`.
- //
- // If the write did not actually change the document, this will be the
- // previous update_time.
- google.protobuf.Timestamp update_time = 1;
-
- // The results of applying each [DocumentTransform.FieldTransform][google.firestore.v1beta1.DocumentTransform.FieldTransform], in the
- // same order.
- repeated Value transform_results = 2;
-}
-
-// A [Document][google.firestore.v1beta1.Document] has changed.
-//
-// May be the result of multiple [writes][google.firestore.v1beta1.Write], including deletes, that
-// ultimately resulted in a new value for the [Document][google.firestore.v1beta1.Document].
-//
-// Multiple [DocumentChange][google.firestore.v1beta1.DocumentChange] messages may be returned for the same logical
-// change, if multiple targets are affected.
-message DocumentChange {
- // The new state of the [Document][google.firestore.v1beta1.Document].
- //
- // If `mask` is set, contains only fields that were updated or added.
- Document document = 1;
-
- // A set of target IDs of targets that match this document.
- repeated int32 target_ids = 5;
-
- // A set of target IDs for targets that no longer match this document.
- repeated int32 removed_target_ids = 6;
-}
-
-// A [Document][google.firestore.v1beta1.Document] has been deleted.
-//
-// May be the result of multiple [writes][google.firestore.v1beta1.Write], including updates, the
-// last of which deleted the [Document][google.firestore.v1beta1.Document].
-//
-// Multiple [DocumentDelete][google.firestore.v1beta1.DocumentDelete] messages may be returned for the same logical
-// delete, if multiple targets are affected.
-message DocumentDelete {
- // The resource name of the [Document][google.firestore.v1beta1.Document] that was deleted.
- string document = 1;
-
- // A set of target IDs for targets that previously matched this entity.
- repeated int32 removed_target_ids = 6;
-
- // The read timestamp at which the delete was observed.
- //
- // Greater or equal to the `commit_time` of the delete.
- google.protobuf.Timestamp read_time = 4;
-}
-
-// A [Document][google.firestore.v1beta1.Document] has been removed from the view of the targets.
-//
-// Sent if the document is no longer relevant to a target and is out of view.
-// Can be sent instead of a DocumentDelete or a DocumentChange if the server
-// can not send the new value of the document.
-//
-// Multiple [DocumentRemove][google.firestore.v1beta1.DocumentRemove] messages may be returned for the same logical
-// write or delete, if multiple targets are affected.
-message DocumentRemove {
- // The resource name of the [Document][google.firestore.v1beta1.Document] that has gone out of view.
- string document = 1;
-
- // A set of target IDs for targets that previously matched this document.
- repeated int32 removed_target_ids = 2;
-
- // The read timestamp at which the remove was observed.
- //
- // Greater or equal to the `commit_time` of the change/delete/remove.
- google.protobuf.Timestamp read_time = 4;
-}
-
-// A digest of all the documents that match a given target.
-message ExistenceFilter {
- // The target ID to which this filter applies.
- int32 target_id = 1;
-
- // The total count of documents that match [target_id][google.firestore.v1beta1.ExistenceFilter.target_id].
- //
- // If different from the count of documents in the client that match, the
- // client must manually determine which documents no longer match the target.
- int32 count = 2;
-}
diff --git a/google/cloud/firestore_v1beta1/proto/write_pb2.py b/google/cloud/firestore_v1beta1/proto/write_pb2.py
deleted file mode 100644
index f9b0aa95cb..0000000000
--- a/google/cloud/firestore_v1beta1/proto/write_pb2.py
+++ /dev/null
@@ -1,1156 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/firestore_v1beta1/proto/write.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.cloud.firestore_v1beta1.proto import (
- common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2,
-)
-from google.cloud.firestore_v1beta1.proto import (
- document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
-)
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/firestore_v1beta1/proto/write.proto",
- package="google.firestore.v1beta1",
- syntax="proto3",
- serialized_options=_b(
- "\n\034com.google.firestore.v1beta1B\nWriteProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1"
- ),
- serialized_pb=_b(
- '\n0google/cloud/firestore_v1beta1/proto/write.proto\x12\x18google.firestore.v1beta1\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\x9d\x02\n\x05Write\x12\x34\n\x06update\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentH\x00\x12\x10\n\x06\x64\x65lete\x18\x02 \x01(\tH\x00\x12@\n\ttransform\x18\x06 \x01(\x0b\x32+.google.firestore.v1beta1.DocumentTransformH\x00\x12;\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.PreconditionB\x0b\n\toperation"\x88\x05\n\x11\x44ocumentTransform\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12T\n\x10\x66ield_transforms\x18\x02 \x03(\x0b\x32:.google.firestore.v1beta1.DocumentTransform.FieldTransform\x1a\x8a\x04\n\x0e\x46ieldTransform\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x65\n\x13set_to_server_value\x18\x02 \x01(\x0e\x32\x46.google.firestore.v1beta1.DocumentTransform.FieldTransform.ServerValueH\x00\x12\x34\n\tincrement\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12\x32\n\x07maximum\x18\x04 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12\x32\n\x07minimum\x18\x05 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12G\n\x17\x61ppend_missing_elements\x18\x06 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x45\n\x15remove_all_from_array\x18\x07 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00"=\n\x0bServerValue\x12\x1c\n\x18SERVER_VALUE_UNSPECIFIED\x10\x00\x12\x10\n\x0cREQUEST_TIME\x10\x01\x42\x10\n\x0etransform_type"z\n\x0bWriteResult\x12/\n\x0bupdate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x11transform_results\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value"v\n\x0e\x44ocumentChange\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x12\n\ntarget_ids\x18\x05 
\x03(\x05\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05"m\n\x0e\x44ocumentDelete\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"m\n\x0e\x44ocumentRemove\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x02 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"3\n\x0f\x45xistenceFilter\x12\x11\n\ttarget_id\x18\x01 \x01(\x05\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nWriteProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3'
- ),
- dependencies=[
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- ],
-)
-
-
-_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE = _descriptor.EnumDescriptor(
- name="ServerValue",
- full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.ServerValue",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="SERVER_VALUE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="REQUEST_TIME", index=1, number=1, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1103,
- serialized_end=1164,
-)
-_sym_db.RegisterEnumDescriptor(_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE)
-
-
-_WRITE = _descriptor.Descriptor(
- name="Write",
- full_name="google.firestore.v1beta1.Write",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="update",
- full_name="google.firestore.v1beta1.Write.update",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="delete",
- full_name="google.firestore.v1beta1.Write.delete",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="transform",
- full_name="google.firestore.v1beta1.Write.transform",
- index=2,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="update_mask",
- full_name="google.firestore.v1beta1.Write.update_mask",
- index=3,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="current_document",
- full_name="google.firestore.v1beta1.Write.current_document",
- index=4,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="operation",
- full_name="google.firestore.v1beta1.Write.operation",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=246,
- serialized_end=531,
-)
-
-
-_DOCUMENTTRANSFORM_FIELDTRANSFORM = _descriptor.Descriptor(
- name="FieldTransform",
- full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="field_path",
- full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.field_path",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="set_to_server_value",
- full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.set_to_server_value",
- index=1,
- number=2,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="increment",
- full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.increment",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="maximum",
- full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.maximum",
- index=3,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="minimum",
- full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.minimum",
- index=4,
- number=5,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="append_missing_elements",
- full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.append_missing_elements",
- index=5,
- number=6,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="remove_all_from_array",
- full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.remove_all_from_array",
- index=6,
- number=7,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name="transform_type",
- full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.transform_type",
- index=0,
- containing_type=None,
- fields=[],
- )
- ],
- serialized_start=660,
- serialized_end=1182,
-)
-
-_DOCUMENTTRANSFORM = _descriptor.Descriptor(
- name="DocumentTransform",
- full_name="google.firestore.v1beta1.DocumentTransform",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="document",
- full_name="google.firestore.v1beta1.DocumentTransform.document",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="field_transforms",
- full_name="google.firestore.v1beta1.DocumentTransform.field_transforms",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_DOCUMENTTRANSFORM_FIELDTRANSFORM],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=534,
- serialized_end=1182,
-)
-
-
-_WRITERESULT = _descriptor.Descriptor(
- name="WriteResult",
- full_name="google.firestore.v1beta1.WriteResult",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="update_time",
- full_name="google.firestore.v1beta1.WriteResult.update_time",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="transform_results",
- full_name="google.firestore.v1beta1.WriteResult.transform_results",
- index=1,
- number=2,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1184,
- serialized_end=1306,
-)
-
-
-_DOCUMENTCHANGE = _descriptor.Descriptor(
- name="DocumentChange",
- full_name="google.firestore.v1beta1.DocumentChange",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="document",
- full_name="google.firestore.v1beta1.DocumentChange.document",
- index=0,
- number=1,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="target_ids",
- full_name="google.firestore.v1beta1.DocumentChange.target_ids",
- index=1,
- number=5,
- type=5,
- cpp_type=1,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="removed_target_ids",
- full_name="google.firestore.v1beta1.DocumentChange.removed_target_ids",
- index=2,
- number=6,
- type=5,
- cpp_type=1,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1308,
- serialized_end=1426,
-)
-
-
-_DOCUMENTDELETE = _descriptor.Descriptor(
- name="DocumentDelete",
- full_name="google.firestore.v1beta1.DocumentDelete",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="document",
- full_name="google.firestore.v1beta1.DocumentDelete.document",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="removed_target_ids",
- full_name="google.firestore.v1beta1.DocumentDelete.removed_target_ids",
- index=1,
- number=6,
- type=5,
- cpp_type=1,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.DocumentDelete.read_time",
- index=2,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1428,
- serialized_end=1537,
-)
-
-
-_DOCUMENTREMOVE = _descriptor.Descriptor(
- name="DocumentRemove",
- full_name="google.firestore.v1beta1.DocumentRemove",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="document",
- full_name="google.firestore.v1beta1.DocumentRemove.document",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="removed_target_ids",
- full_name="google.firestore.v1beta1.DocumentRemove.removed_target_ids",
- index=1,
- number=2,
- type=5,
- cpp_type=1,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="read_time",
- full_name="google.firestore.v1beta1.DocumentRemove.read_time",
- index=2,
- number=4,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1539,
- serialized_end=1648,
-)
-
-
-_EXISTENCEFILTER = _descriptor.Descriptor(
- name="ExistenceFilter",
- full_name="google.firestore.v1beta1.ExistenceFilter",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="target_id",
- full_name="google.firestore.v1beta1.ExistenceFilter.target_id",
- index=0,
- number=1,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="count",
- full_name="google.firestore.v1beta1.ExistenceFilter.count",
- index=1,
- number=2,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=None,
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1650,
- serialized_end=1701,
-)
-
-_WRITE.fields_by_name[
- "update"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_WRITE.fields_by_name["transform"].message_type = _DOCUMENTTRANSFORM
-_WRITE.fields_by_name[
- "update_mask"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK
-)
-_WRITE.fields_by_name[
- "current_document"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION
-)
-_WRITE.oneofs_by_name["operation"].fields.append(_WRITE.fields_by_name["update"])
-_WRITE.fields_by_name["update"].containing_oneof = _WRITE.oneofs_by_name["operation"]
-_WRITE.oneofs_by_name["operation"].fields.append(_WRITE.fields_by_name["delete"])
-_WRITE.fields_by_name["delete"].containing_oneof = _WRITE.oneofs_by_name["operation"]
-_WRITE.oneofs_by_name["operation"].fields.append(_WRITE.fields_by_name["transform"])
-_WRITE.fields_by_name["transform"].containing_oneof = _WRITE.oneofs_by_name["operation"]
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "set_to_server_value"
-].enum_type = _DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "increment"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "maximum"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "minimum"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "append_missing_elements"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._ARRAYVALUE
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "remove_all_from_array"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._ARRAYVALUE
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.containing_type = _DOCUMENTTRANSFORM
-_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE.containing_type = (
- _DOCUMENTTRANSFORM_FIELDTRANSFORM
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
- _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["set_to_server_value"]
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "set_to_server_value"
-].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
- _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["increment"]
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "increment"
-].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
- _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["maximum"]
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "maximum"
-].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
- _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["minimum"]
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "minimum"
-].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
- _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["append_missing_elements"]
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "append_missing_elements"
-].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append(
- _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["remove_all_from_array"]
-)
-_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[
- "remove_all_from_array"
-].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"]
-_DOCUMENTTRANSFORM.fields_by_name[
- "field_transforms"
-].message_type = _DOCUMENTTRANSFORM_FIELDTRANSFORM
-_WRITERESULT.fields_by_name[
- "update_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_WRITERESULT.fields_by_name[
- "transform_results"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE
-)
-_DOCUMENTCHANGE.fields_by_name[
- "document"
-].message_type = (
- google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT
-)
-_DOCUMENTDELETE.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_DOCUMENTREMOVE.fields_by_name[
- "read_time"
-].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-DESCRIPTOR.message_types_by_name["Write"] = _WRITE
-DESCRIPTOR.message_types_by_name["DocumentTransform"] = _DOCUMENTTRANSFORM
-DESCRIPTOR.message_types_by_name["WriteResult"] = _WRITERESULT
-DESCRIPTOR.message_types_by_name["DocumentChange"] = _DOCUMENTCHANGE
-DESCRIPTOR.message_types_by_name["DocumentDelete"] = _DOCUMENTDELETE
-DESCRIPTOR.message_types_by_name["DocumentRemove"] = _DOCUMENTREMOVE
-DESCRIPTOR.message_types_by_name["ExistenceFilter"] = _EXISTENCEFILTER
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-Write = _reflection.GeneratedProtocolMessageType(
- "Write",
- (_message.Message,),
- dict(
- DESCRIPTOR=_WRITE,
- __module__="google.cloud.firestore_v1beta1.proto.write_pb2",
- __doc__="""A write on a document.
-
-
- Attributes:
- operation:
- The operation to execute.
- update:
- A document to write.
- delete:
- A document name to delete. In the format: ``projects/{project_
- id}/databases/{database_id}/documents/{document_path}``.
- transform:
- Applies a transformation to a document. At most one
- ``transform`` per document is allowed in a given request. An
- ``update`` cannot follow a ``transform`` on the same document
- in a given request.
- update_mask:
- The fields to update in this write. This field can be set
- only when the operation is ``update``. If the mask is not set
- for an ``update`` and the document exists, any existing data
- will be overwritten. If the mask is set and the document on
- the server has fields not covered by the mask, they are left
- unchanged. Fields referenced in the mask, but not present in
- the input document, are deleted from the document on the
- server. The field paths in this mask must not contain a
- reserved field name.
- current_document:
- An optional precondition on the document. The write will fail
- if this is set and not met by the target document.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Write)
- ),
-)
-_sym_db.RegisterMessage(Write)
-
-DocumentTransform = _reflection.GeneratedProtocolMessageType(
- "DocumentTransform",
- (_message.Message,),
- dict(
- FieldTransform=_reflection.GeneratedProtocolMessageType(
- "FieldTransform",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCUMENTTRANSFORM_FIELDTRANSFORM,
- __module__="google.cloud.firestore_v1beta1.proto.write_pb2",
- __doc__="""A transformation of a field of the document.
-
-
- Attributes:
- field_path:
- The path of the field. See
- [Document.fields][google.firestore.v1beta1.Document.fields]
- for the field path syntax reference.
- transform_type:
- The transformation to apply on the field.
- set_to_server_value:
- Sets the field to the given server value.
- increment:
- Adds the given value to the field's current value. This must
- be an integer or a double value. If the field is not an
- integer or double, or if the field does not yet exist, the
- transformation will set the field to the given value. If
- either of the given value or the current field value are
- doubles, both values will be interpreted as doubles. Double
- arithmetic and representation of double values follow IEEE 754
- semantics. If there is positive/negative integer overflow, the
- field is resolved to the largest magnitude positive/negative
- integer.
- maximum:
- Sets the field to the maximum of its current value and the
- given value. This must be an integer or a double value. If
- the field is not an integer or double, or if the field does
- not yet exist, the transformation will set the field to the
- given value. If a maximum operation is applied where the field
- and the input value are of mixed types (that is - one is an
- integer and one is a double) the field takes on the type of
- the larger operand. If the operands are equivalent (e.g. 3 and
- 3.0), the field does not change. 0, 0.0, and -0.0 are all
- zero. The maximum of a zero stored value and zero input value
- is always the stored value. The maximum of any numeric value x
- and NaN is NaN.
- minimum:
- Sets the field to the minimum of its current value and the
- given value. This must be an integer or a double value. If
- the field is not an integer or double, or if the field does
- not yet exist, the transformation will set the field to the
- input value. If a minimum operation is applied where the field
- and the input value are of mixed types (that is - one is an
- integer and one is a double) the field takes on the type of
- the smaller operand. If the operands are equivalent (e.g. 3
- and 3.0), the field does not change. 0, 0.0, and -0.0 are all
- zero. The minimum of a zero stored value and zero input value
- is always the stored value. The minimum of any numeric value x
- and NaN is NaN.
- append_missing_elements:
- Append the given elements in order if they are not already
- present in the current field value. If the field is not an
- array, or if the field does not yet exist, it is first set to
- the empty array. Equivalent numbers of different types (e.g.
- 3L and 3.0) are considered equal when checking if a value is
- missing. NaN is equal to NaN, and Null is equal to Null. If
- the input contains multiple equivalent values, only the first
- will be considered. The corresponding transform\_result will
- be the null value.
- remove_all_from_array:
- Remove all of the given elements from the array in the field.
- If the field is not an array, or if the field does not yet
- exist, it is set to the empty array. Equivalent numbers of
- the different types (e.g. 3L and 3.0) are considered equal
- when deciding whether an element should be removed. NaN is
- equal to NaN, and Null is equal to Null. This will remove all
- equivalent values if there are duplicates. The corresponding
- transform\_result will be the null value.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentTransform.FieldTransform)
- ),
- ),
- DESCRIPTOR=_DOCUMENTTRANSFORM,
- __module__="google.cloud.firestore_v1beta1.proto.write_pb2",
- __doc__="""A transformation of a document.
-
-
- Attributes:
- document:
- The name of the document to transform.
- field_transforms:
- The list of transformations to apply to the fields of the
- document, in order. This must not be empty.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentTransform)
- ),
-)
-_sym_db.RegisterMessage(DocumentTransform)
-_sym_db.RegisterMessage(DocumentTransform.FieldTransform)
-
-WriteResult = _reflection.GeneratedProtocolMessageType(
- "WriteResult",
- (_message.Message,),
- dict(
- DESCRIPTOR=_WRITERESULT,
- __module__="google.cloud.firestore_v1beta1.proto.write_pb2",
- __doc__="""The result of applying a write.
-
-
- Attributes:
- update_time:
- The last update time of the document after applying the write.
- Not set after a ``delete``. If the write did not actually
- change the document, this will be the previous update\_time.
- transform_results:
- The results of applying each [DocumentTransform.FieldTransform
- ][google.firestore.v1beta1.DocumentTransform.FieldTransform],
- in the same order.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteResult)
- ),
-)
-_sym_db.RegisterMessage(WriteResult)
-
-DocumentChange = _reflection.GeneratedProtocolMessageType(
- "DocumentChange",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCUMENTCHANGE,
- __module__="google.cloud.firestore_v1beta1.proto.write_pb2",
- __doc__="""A [Document][google.firestore.v1beta1.Document] has
- changed.
-
- May be the result of multiple [writes][google.firestore.v1beta1.Write],
- including deletes, that ultimately resulted in a new value for the
- [Document][google.firestore.v1beta1.Document].
-
- Multiple [DocumentChange][google.firestore.v1beta1.DocumentChange]
- messages may be returned for the same logical change, if multiple
- targets are affected.
-
-
- Attributes:
- document:
- The new state of the
- [Document][google.firestore.v1beta1.Document]. If ``mask`` is
- set, contains only fields that were updated or added.
- target_ids:
- A set of target IDs of targets that match this document.
- removed_target_ids:
- A set of target IDs for targets that no longer match this
- document.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentChange)
- ),
-)
-_sym_db.RegisterMessage(DocumentChange)
-
-DocumentDelete = _reflection.GeneratedProtocolMessageType(
- "DocumentDelete",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCUMENTDELETE,
- __module__="google.cloud.firestore_v1beta1.proto.write_pb2",
- __doc__="""A [Document][google.firestore.v1beta1.Document] has been
- deleted.
-
- May be the result of multiple [writes][google.firestore.v1beta1.Write],
- including updates, the last of which deleted the
- [Document][google.firestore.v1beta1.Document].
-
- Multiple [DocumentDelete][google.firestore.v1beta1.DocumentDelete]
- messages may be returned for the same logical delete, if multiple
- targets are affected.
-
-
- Attributes:
- document:
- The resource name of the
- [Document][google.firestore.v1beta1.Document] that was
- deleted.
- removed_target_ids:
- A set of target IDs for targets that previously matched this
- entity.
- read_time:
- The read timestamp at which the delete was observed. Greater
- or equal to the ``commit_time`` of the delete.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentDelete)
- ),
-)
-_sym_db.RegisterMessage(DocumentDelete)
-
-DocumentRemove = _reflection.GeneratedProtocolMessageType(
- "DocumentRemove",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DOCUMENTREMOVE,
- __module__="google.cloud.firestore_v1beta1.proto.write_pb2",
- __doc__="""A [Document][google.firestore.v1beta1.Document] has been
- removed from the view of the targets.
-
- Sent if the document is no longer relevant to a target and is out of
- view. Can be sent instead of a DocumentDelete or a DocumentChange if the
- server can not send the new value of the document.
-
- Multiple [DocumentRemove][google.firestore.v1beta1.DocumentRemove]
- messages may be returned for the same logical write or delete, if
- multiple targets are affected.
-
-
- Attributes:
- document:
- The resource name of the
- [Document][google.firestore.v1beta1.Document] that has gone
- out of view.
- removed_target_ids:
- A set of target IDs for targets that previously matched this
- document.
- read_time:
- The read timestamp at which the remove was observed. Greater
- or equal to the ``commit_time`` of the change/delete/remove.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentRemove)
- ),
-)
-_sym_db.RegisterMessage(DocumentRemove)
-
-ExistenceFilter = _reflection.GeneratedProtocolMessageType(
- "ExistenceFilter",
- (_message.Message,),
- dict(
- DESCRIPTOR=_EXISTENCEFILTER,
- __module__="google.cloud.firestore_v1beta1.proto.write_pb2",
- __doc__="""A digest of all the documents that match a given target.
-
-
- Attributes:
- target_id:
- The target ID to which this filter applies.
- count:
- The total count of documents that match [target\_id][google.fi
- restore.v1beta1.ExistenceFilter.target\_id]. If different
- from the count of documents in the client that match, the
- client must manually determine which documents no longer match
- the target.
- """,
- # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ExistenceFilter)
- ),
-)
-_sym_db.RegisterMessage(ExistenceFilter)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py b/google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py
deleted file mode 100644
index 07cb78fe03..0000000000
--- a/google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
diff --git a/google/cloud/firestore_v1beta1/query.py b/google/cloud/firestore_v1beta1/query.py
deleted file mode 100644
index 70dafb0557..0000000000
--- a/google/cloud/firestore_v1beta1/query.py
+++ /dev/null
@@ -1,971 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Classes for representing queries for the Google Cloud Firestore API.
-
-A :class:`~google.cloud.firestore_v1beta1.query.Query` can be created directly
-from a :class:`~google.cloud.firestore_v1beta1.collection.Collection`,
-and that can be a more common way to create a query than direct usage of the
-constructor.
-"""
-import copy
-import math
-import warnings
-
-from google.protobuf import wrappers_pb2
-import six
-
-from google.cloud.firestore_v1beta1 import _helpers
-from google.cloud.firestore_v1beta1 import document
-from google.cloud.firestore_v1beta1 import field_path as field_path_module
-from google.cloud.firestore_v1beta1 import transforms
-from google.cloud.firestore_v1beta1.gapic import enums
-from google.cloud.firestore_v1beta1.proto import query_pb2
-from google.cloud.firestore_v1beta1.order import Order
-from google.cloud.firestore_v1beta1.watch import Watch
-
-_EQ_OP = "=="
-_operator_enum = enums.StructuredQuery.FieldFilter.Operator
-_COMPARISON_OPERATORS = {
- "<": _operator_enum.LESS_THAN,
- "<=": _operator_enum.LESS_THAN_OR_EQUAL,
- _EQ_OP: _operator_enum.EQUAL,
- ">=": _operator_enum.GREATER_THAN_OR_EQUAL,
- ">": _operator_enum.GREATER_THAN,
- "array_contains": _operator_enum.ARRAY_CONTAINS,
-}
-_BAD_OP_STRING = "Operator string {!r} is invalid. Valid choices are: {}."
-_BAD_OP_NAN_NULL = 'Only an equality filter ("==") can be used with None or NaN values'
-_INVALID_WHERE_TRANSFORM = "Transforms cannot be used as where values."
-_BAD_DIR_STRING = "Invalid direction {!r}. Must be one of {!r} or {!r}."
-_INVALID_CURSOR_TRANSFORM = "Transforms cannot be used as cursor values."
-_MISSING_ORDER_BY = (
- 'The "order by" field path {!r} is not present in the cursor data {!r}. '
- "All fields sent to ``order_by()`` must be present in the fields "
- "if passed to one of ``start_at()`` / ``start_after()`` / "
- "``end_before()`` / ``end_at()`` to define a cursor."
-)
-_NO_ORDERS_FOR_CURSOR = (
- "Attempting to create a cursor with no fields to order on. "
- "When defining a cursor with one of ``start_at()`` / ``start_after()`` / "
- "``end_before()`` / ``end_at()``, all fields in the cursor must "
- "come from fields set in ``order_by()``."
-)
-_MISMATCH_CURSOR_W_ORDER_BY = "The cursor {!r} does not match the order fields {!r}."
-
-
-class Query(object):
- """Represents a query to the Firestore API.
-
- Instances of this class are considered immutable: all methods that
- would modify an instance instead return a new instance.
-
- Args:
- parent (~.firestore_v1beta1.collection.Collection): The collection
- that this query applies to.
- projection (Optional[google.cloud.proto.firestore.v1beta1.\
- query_pb2.StructuredQuery.Projection]): A projection of document
- fields to limit the query results to.
- field_filters (Optional[Tuple[google.cloud.proto.firestore.v1beta1.\
- query_pb2.StructuredQuery.FieldFilter, ...]]): The filters to be
- applied in the query.
- orders (Optional[Tuple[google.cloud.proto.firestore.v1beta1.\
- query_pb2.StructuredQuery.Order, ...]]): The "order by" entries
- to use in the query.
- limit (Optional[int]): The maximum number of documents the
- query is allowed to return.
- offset (Optional[int]): The number of results to skip.
- start_at (Optional[Tuple[dict, bool]]): Two-tuple of
-
- * a mapping of fields. Any field that is present in this mapping
- must also be present in ``orders``
- * an ``after`` flag
-
- The fields and the flag combine to form a cursor used as
- a starting point in a query result set. If the ``after``
- flag is :data:`True`, the results will start just after any
- documents which have fields matching the cursor, otherwise
- any matching documents will be included in the result set.
- When the query is formed, the document values
- will be used in the order given by ``orders``.
- end_at (Optional[Tuple[dict, bool]]): Two-tuple of
-
- * a mapping of fields. Any field that is present in this mapping
- must also be present in ``orders``
- * a ``before`` flag
-
- The fields and the flag combine to form a cursor used as
- an ending point in a query result set. If the ``before``
- flag is :data:`True`, the results will end just before any
- documents which have fields matching the cursor, otherwise
- any matching documents will be included in the result set.
- When the query is formed, the document values
- will be used in the order given by ``orders``.
- """
-
- ASCENDING = "ASCENDING"
- """str: Sort query results in ascending order on a field."""
- DESCENDING = "DESCENDING"
- """str: Sort query results in descending order on a field."""
-
- def __init__(
- self,
- parent,
- projection=None,
- field_filters=(),
- orders=(),
- limit=None,
- offset=None,
- start_at=None,
- end_at=None,
- ):
- self._parent = parent
- self._projection = projection
- self._field_filters = field_filters
- self._orders = orders
- self._limit = limit
- self._offset = offset
- self._start_at = start_at
- self._end_at = end_at
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return NotImplemented
- return (
- self._parent == other._parent
- and self._projection == other._projection
- and self._field_filters == other._field_filters
- and self._orders == other._orders
- and self._limit == other._limit
- and self._offset == other._offset
- and self._start_at == other._start_at
- and self._end_at == other._end_at
- )
-
- @property
- def _client(self):
- """The client of the parent collection.
-
- Returns:
- ~.firestore_v1beta1.client.Client: The client that owns
- this query.
- """
- return self._parent._client
-
- def select(self, field_paths):
- """Project documents matching query to a limited set of fields.
-
- See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
- for more information on **field paths**.
-
- If the current query already has a projection set (i.e. has already
- called :meth:`~google.cloud.firestore_v1beta1.query.Query.select`),
- this will overwrite it.
-
- Args:
- field_paths (Iterable[str, ...]): An iterable of field paths
- (``.``-delimited list of field names) to use as a projection
- of document fields in the query results.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A "projected" query. Acts as
- a copy of the current query, modified with the newly added
- projection.
- Raises:
- ValueError: If any ``field_path`` is invalid.
- """
- field_paths = list(field_paths)
- for field_path in field_paths:
- field_path_module.split_field_path(field_path) # raises
-
- new_projection = query_pb2.StructuredQuery.Projection(
- fields=[
- query_pb2.StructuredQuery.FieldReference(field_path=field_path)
- for field_path in field_paths
- ]
- )
- return self.__class__(
- self._parent,
- projection=new_projection,
- field_filters=self._field_filters,
- orders=self._orders,
- limit=self._limit,
- offset=self._offset,
- start_at=self._start_at,
- end_at=self._end_at,
- )
-
- def where(self, field_path, op_string, value):
- """Filter the query on a field.
-
- See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
- for more information on **field paths**.
-
- Returns a new :class:`~google.cloud.firestore_v1beta1.query.Query`
- that filters on a specific field path, according to an operation
- (e.g. ``==`` or "equals") and a particular value to be paired with
- that operation.
-
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names) for the field to filter on.
- op_string (str): A comparison operation in the form of a string.
- Acceptable values are ``<``, ``<=``, ``==``, ``>=``
- and ``>``.
- value (Any): The value to compare the field against in the filter.
- If ``value`` is :data:`None` or a NaN, then ``==`` is the only
- allowed operation.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A filtered query. Acts as a
- copy of the current query, modified with the newly added filter.
-
- Raises:
- ValueError: If ``field_path`` is invalid.
- ValueError: If ``value`` is a NaN or :data:`None` and
- ``op_string`` is not ``==``.
- """
- field_path_module.split_field_path(field_path) # raises
-
- if value is None:
- if op_string != _EQ_OP:
- raise ValueError(_BAD_OP_NAN_NULL)
- filter_pb = query_pb2.StructuredQuery.UnaryFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- op=enums.StructuredQuery.UnaryFilter.Operator.IS_NULL,
- )
- elif _isnan(value):
- if op_string != _EQ_OP:
- raise ValueError(_BAD_OP_NAN_NULL)
- filter_pb = query_pb2.StructuredQuery.UnaryFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- op=enums.StructuredQuery.UnaryFilter.Operator.IS_NAN,
- )
- elif isinstance(value, (transforms.Sentinel, transforms._ValueList)):
- raise ValueError(_INVALID_WHERE_TRANSFORM)
- else:
- filter_pb = query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- op=_enum_from_op_string(op_string),
- value=_helpers.encode_value(value),
- )
-
- new_filters = self._field_filters + (filter_pb,)
- return self.__class__(
- self._parent,
- projection=self._projection,
- field_filters=new_filters,
- orders=self._orders,
- limit=self._limit,
- offset=self._offset,
- start_at=self._start_at,
- end_at=self._end_at,
- )
-
- @staticmethod
- def _make_order(field_path, direction):
- """Helper for :meth:`order_by`."""
- return query_pb2.StructuredQuery.Order(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- direction=_enum_from_direction(direction),
- )
-
- def order_by(self, field_path, direction=ASCENDING):
- """Modify the query to add an order clause on a specific field.
-
- See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`
- for more information on **field paths**.
-
- Successive :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by` calls
- will further refine the ordering of results returned by the query
- (i.e. the new "order by" fields will be added to existing ones).
-
- Args:
- field_path (str): A field path (``.``-delimited list of
- field names) on which to order the query results.
- direction (Optional[str]): The direction to order by. Must be one
- of :attr:`ASCENDING` or :attr:`DESCENDING`, defaults to
- :attr:`ASCENDING`.
-
- Returns:
- ~.firestore_v1beta1.query.Query: An ordered query. Acts as a
- copy of the current query, modified with the newly added
- "order by" constraint.
-
- Raises:
- ValueError: If ``field_path`` is invalid.
- ValueError: If ``direction`` is not one of :attr:`ASCENDING` or
- :attr:`DESCENDING`.
- """
- field_path_module.split_field_path(field_path) # raises
-
- order_pb = self._make_order(field_path, direction)
-
- new_orders = self._orders + (order_pb,)
- return self.__class__(
- self._parent,
- projection=self._projection,
- field_filters=self._field_filters,
- orders=new_orders,
- limit=self._limit,
- offset=self._offset,
- start_at=self._start_at,
- end_at=self._end_at,
- )
-
- def limit(self, count):
- """Limit a query to return a fixed number of results.
-
- If the current query already has a limit set, this will overwrite it.
-
- Args:
- count (int): Maximum number of documents to return that match
- the query.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A limited query. Acts as a
- copy of the current query, modified with the newly added
- "limit" filter.
- """
- return self.__class__(
- self._parent,
- projection=self._projection,
- field_filters=self._field_filters,
- orders=self._orders,
- limit=count,
- offset=self._offset,
- start_at=self._start_at,
- end_at=self._end_at,
- )
-
- def offset(self, num_to_skip):
- """Skip to an offset in a query.
-
- If the current query already has specified an offset, this will
- overwrite it.
-
- Args:
- num_to_skip (int): The number of results to skip at the beginning
- of query results. (Must be non-negative.)
-
- Returns:
- ~.firestore_v1beta1.query.Query: An offset query. Acts as a
- copy of the current query, modified with the newly added
- "offset" field.
- """
- return self.__class__(
- self._parent,
- projection=self._projection,
- field_filters=self._field_filters,
- orders=self._orders,
- limit=self._limit,
- offset=num_to_skip,
- start_at=self._start_at,
- end_at=self._end_at,
- )
-
- def _cursor_helper(self, document_fields, before, start):
- """Set values to be used for a ``start_at`` or ``end_at`` cursor.
-
- The values will later be used in a query protobuf.
-
- When the query is sent to the server, the ``document_fields`` will
- be used in the order given by fields set by
- :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`.
-
- Args:
- document_fields (Union[~.firestore_v1beta1.\
- document.DocumentSnapshot, dict, list, tuple]): a document
- snapshot or a dictionary/list/tuple of fields representing a
- query results cursor. A cursor is a collection of values that
- represent a position in a query result set.
- before (bool): Flag indicating if the document in
- ``document_fields`` should (:data:`False`) or
- shouldn't (:data:`True`) be included in the result set.
- start (Optional[bool]): determines if the cursor is a ``start_at``
- cursor (:data:`True`) or an ``end_at`` cursor (:data:`False`).
-
- Returns:
- ~.firestore_v1beta1.query.Query: A query with cursor. Acts as
- a copy of the current query, modified with the newly added
- "start at" cursor.
- """
- if isinstance(document_fields, tuple):
- document_fields = list(document_fields)
- elif isinstance(document_fields, document.DocumentSnapshot):
- if document_fields.reference._path[:-1] != self._parent._path:
- raise ValueError(
- "Cannot use snapshot from another collection as a cursor."
- )
- else:
- # NOTE: We copy so that the caller can't modify after calling.
- document_fields = copy.deepcopy(document_fields)
-
- cursor_pair = document_fields, before
- query_kwargs = {
- "projection": self._projection,
- "field_filters": self._field_filters,
- "orders": self._orders,
- "limit": self._limit,
- "offset": self._offset,
- }
- if start:
- query_kwargs["start_at"] = cursor_pair
- query_kwargs["end_at"] = self._end_at
- else:
- query_kwargs["start_at"] = self._start_at
- query_kwargs["end_at"] = cursor_pair
-
- return self.__class__(self._parent, **query_kwargs)
-
- def start_at(self, document_fields):
- """Start query results at a particular document value.
-
- The result set will **include** the document specified by
- ``document_fields``.
-
- If the current query already has specified a start cursor -- either
- via this method or
- :meth:`~google.cloud.firestore_v1beta1.query.Query.start_after` -- this will
- overwrite it.
-
- When the query is sent to the server, the ``document_fields`` will
- be used in the order given by fields set by
- :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`.
-
- Args:
- document_fields (Union[~.firestore_v1beta1.\
- document.DocumentSnapshot, dict, list, tuple]): a document
- snapshot or a dictionary/list/tuple of fields representing a
- query results cursor. A cursor is a collection of values that
- represent a position in a query result set.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A query with cursor. Acts as
- a copy of the current query, modified with the newly added
- "start at" cursor.
- """
- return self._cursor_helper(document_fields, before=True, start=True)
-
- def start_after(self, document_fields):
- """Start query results after a particular document value.
-
- The result set will **exclude** the document specified by
- ``document_fields``.
-
- If the current query already has specified a start cursor -- either
- via this method or
- :meth:`~google.cloud.firestore_v1beta1.query.Query.start_at` -- this will
- overwrite it.
-
- When the query is sent to the server, the ``document_fields`` will
- be used in the order given by fields set by
- :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`.
-
- Args:
- document_fields (Union[~.firestore_v1beta1.\
- document.DocumentSnapshot, dict, list, tuple]): a document
- snapshot or a dictionary/list/tuple of fields representing a
- query results cursor. A cursor is a collection of values that
- represent a position in a query result set.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A query with cursor. Acts as
- a copy of the current query, modified with the newly added
- "start after" cursor.
- """
- return self._cursor_helper(document_fields, before=False, start=True)
-
- def end_before(self, document_fields):
- """End query results before a particular document value.
-
- The result set will **exclude** the document specified by
- ``document_fields``.
-
- If the current query already has specified an end cursor -- either
- via this method or
- :meth:`~google.cloud.firestore_v1beta1.query.Query.end_at` -- this will
- overwrite it.
-
- When the query is sent to the server, the ``document_fields`` will
- be used in the order given by fields set by
- :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`.
-
- Args:
- document_fields (Union[~.firestore_v1beta1.\
- document.DocumentSnapshot, dict, list, tuple]): a document
- snapshot or a dictionary/list/tuple of fields representing a
- query results cursor. A cursor is a collection of values that
- represent a position in a query result set.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A query with cursor. Acts as
- a copy of the current query, modified with the newly added
- "end before" cursor.
- """
- return self._cursor_helper(document_fields, before=True, start=False)
-
- def end_at(self, document_fields):
- """End query results at a particular document value.
-
- The result set will **include** the document specified by
- ``document_fields``.
-
- If the current query already has specified an end cursor -- either
- via this method or
- :meth:`~google.cloud.firestore_v1beta1.query.Query.end_before` -- this will
- overwrite it.
-
- When the query is sent to the server, the ``document_fields`` will
- be used in the order given by fields set by
- :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`.
-
- Args:
- document_fields (Union[~.firestore_v1beta1.\
- document.DocumentSnapshot, dict, list, tuple]): a document
- snapshot or a dictionary/list/tuple of fields representing a
- query results cursor. A cursor is a collection of values that
- represent a position in a query result set.
-
- Returns:
- ~.firestore_v1beta1.query.Query: A query with cursor. Acts as
- a copy of the current query, modified with the newly added
- "end at" cursor.
- """
- return self._cursor_helper(document_fields, before=False, start=False)
-
- def _filters_pb(self):
- """Convert all the filters into a single generic Filter protobuf.
-
- This may be a lone field filter or unary filter, may be a composite
- filter or may be :data:`None`.
-
- Returns:
- google.cloud.firestore_v1beta1.types.\
- StructuredQuery.Filter: A "generic" filter representing the
- current query's filters.
- """
- num_filters = len(self._field_filters)
- if num_filters == 0:
- return None
- elif num_filters == 1:
- return _filter_pb(self._field_filters[0])
- else:
- composite_filter = query_pb2.StructuredQuery.CompositeFilter(
- op=enums.StructuredQuery.CompositeFilter.Operator.AND,
- filters=[_filter_pb(filter_) for filter_ in self._field_filters],
- )
- return query_pb2.StructuredQuery.Filter(composite_filter=composite_filter)
-
- @staticmethod
- def _normalize_projection(projection):
- """Helper: convert field paths to message."""
- if projection is not None:
-
- fields = list(projection.fields)
-
- if not fields:
- field_ref = query_pb2.StructuredQuery.FieldReference(
- field_path="__name__"
- )
- return query_pb2.StructuredQuery.Projection(fields=[field_ref])
-
- return projection
-
- def _normalize_orders(self):
- """Helper: adjust orders based on cursors, where clauses."""
- orders = list(self._orders)
- _has_snapshot_cursor = False
-
- if self._start_at:
- if isinstance(self._start_at[0], document.DocumentSnapshot):
- _has_snapshot_cursor = True
-
- if self._end_at:
- if isinstance(self._end_at[0], document.DocumentSnapshot):
- _has_snapshot_cursor = True
-
- if _has_snapshot_cursor:
- should_order = [
- _enum_from_op_string(key)
- for key in _COMPARISON_OPERATORS
- if key not in (_EQ_OP, "array_contains")
- ]
- order_keys = [order.field.field_path for order in orders]
- for filter_ in self._field_filters:
- field = filter_.field.field_path
- if filter_.op in should_order and field not in order_keys:
- orders.append(self._make_order(field, "ASCENDING"))
- if not orders:
- orders.append(self._make_order("__name__", "ASCENDING"))
- else:
- order_keys = [order.field.field_path for order in orders]
- if "__name__" not in order_keys:
- direction = orders[-1].direction # enum?
- orders.append(self._make_order("__name__", direction))
-
- return orders
-
- def _normalize_cursor(self, cursor, orders):
- """Helper: convert cursor to a list of values based on orders."""
- if cursor is None:
- return
-
- if not orders:
- raise ValueError(_NO_ORDERS_FOR_CURSOR)
-
- document_fields, before = cursor
-
- order_keys = [order.field.field_path for order in orders]
-
- if isinstance(document_fields, document.DocumentSnapshot):
- snapshot = document_fields
- document_fields = snapshot.to_dict()
- document_fields["__name__"] = snapshot.reference
-
- if isinstance(document_fields, dict):
- # Transform to list using orders
- values = []
- data = document_fields
- for order_key in order_keys:
- try:
- values.append(field_path_module.get_nested_value(order_key, data))
- except KeyError:
- msg = _MISSING_ORDER_BY.format(order_key, data)
- raise ValueError(msg)
- document_fields = values
-
- if len(document_fields) != len(orders):
- msg = _MISMATCH_CURSOR_W_ORDER_BY.format(document_fields, order_keys)
- raise ValueError(msg)
-
- _transform_bases = (transforms.Sentinel, transforms._ValueList)
-
- for index, key_field in enumerate(zip(order_keys, document_fields)):
- key, field = key_field
-
- if isinstance(field, _transform_bases):
- msg = _INVALID_CURSOR_TRANSFORM
- raise ValueError(msg)
-
- if key == "__name__" and isinstance(field, six.string_types):
- document_fields[index] = self._parent.document(field)
-
- return document_fields, before
-
- def _to_protobuf(self):
- """Convert the current query into the equivalent protobuf.
-
- Returns:
- google.cloud.firestore_v1beta1.types.StructuredQuery: The
- query protobuf.
- """
- projection = self._normalize_projection(self._projection)
- orders = self._normalize_orders()
- start_at = self._normalize_cursor(self._start_at, orders)
- end_at = self._normalize_cursor(self._end_at, orders)
-
- query_kwargs = {
- "select": projection,
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(
- collection_id=self._parent.id
- )
- ],
- "where": self._filters_pb(),
- "order_by": orders,
- "start_at": _cursor_pb(start_at),
- "end_at": _cursor_pb(end_at),
- }
- if self._offset is not None:
- query_kwargs["offset"] = self._offset
- if self._limit is not None:
- query_kwargs["limit"] = wrappers_pb2.Int32Value(value=self._limit)
-
- return query_pb2.StructuredQuery(**query_kwargs)
-
- def get(self, transaction=None):
- """Deprecated alias for :meth:`stream`."""
- warnings.warn(
- "'Query.get' is deprecated: please use 'Query.stream' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return self.stream(transaction=transaction)
-
- def stream(self, transaction=None):
- """Read the documents in the collection that match this query.
-
- This sends a ``RunQuery`` RPC and then returns an iterator which
- consumes each document returned in the stream of ``RunQueryResponse``
- messages.
-
- .. note::
-
- The underlying stream of responses will time out after
- the ``max_rpc_timeout_millis`` value set in the GAPIC
- client configuration for the ``RunQuery`` API. Snapshots
- not consumed from the iterator before that point will be lost.
-
- If a ``transaction`` is used and it already has write operations
- added, this method cannot be used (i.e. read-after-write is not
- allowed).
-
- Args:
- transaction (Optional[~.firestore_v1beta1.transaction.\
- Transaction]): An existing transaction that this query will
- run in.
-
- Yields:
- ~.firestore_v1beta1.document.DocumentSnapshot: The next
- document that fulfills the query.
- """
- parent_path, expected_prefix = self._parent._parent_info()
- response_iterator = self._client._firestore_api.run_query(
- parent_path,
- self._to_protobuf(),
- transaction=_helpers.get_transaction_id(transaction),
- metadata=self._client._rpc_metadata,
- )
-
- for response in response_iterator:
- snapshot = _query_response_to_snapshot(
- response, self._parent, expected_prefix
- )
- if snapshot is not None:
- yield snapshot
-
- def on_snapshot(self, callback):
- """Monitor the documents in this collection that match this query.
-
- This starts a watch on this query using a background thread. The
- provided callback is run on the snapshot of the documents.
-
- Args:
- callback(~.firestore.query.QuerySnapshot): a callback to run when
- a change occurs.
-
- Example:
- from google.cloud import firestore_v1beta1
-
- db = firestore_v1beta1.Client()
- query_ref = db.collection(u'users').where("user", "==", u'Ada')
-
- def on_snapshot(docs, changes, read_time):
- for doc in docs:
- print(u'{} => {}'.format(doc.id, doc.to_dict()))
-
- # Watch this query
- query_watch = query_ref.on_snapshot(on_snapshot)
-
- # Terminate this watch
- query_watch.unsubscribe()
- """
- return Watch.for_query(
- self, callback, document.DocumentSnapshot, document.DocumentReference
- )
-
- def _comparator(self, doc1, doc2):
- _orders = self._orders
-
- # Add implicit sorting by name, using the last specified direction.
- if len(_orders) == 0:
- lastDirection = Query.ASCENDING
- else:
- if _orders[-1].direction == 1:
- lastDirection = Query.ASCENDING
- else:
- lastDirection = Query.DESCENDING
-
- orderBys = list(_orders)
-
- order_pb = query_pb2.StructuredQuery.Order(
- field=query_pb2.StructuredQuery.FieldReference(field_path="id"),
- direction=_enum_from_direction(lastDirection),
- )
- orderBys.append(order_pb)
-
- for orderBy in orderBys:
- if orderBy.field.field_path == "id":
- # If ordering by docuent id, compare resource paths.
- comp = Order()._compare_to(doc1.reference._path, doc2.reference._path)
- else:
- if (
- orderBy.field.field_path not in doc1._data
- or orderBy.field.field_path not in doc2._data
- ):
- raise ValueError(
- "Can only compare fields that exist in the "
- "DocumentSnapshot. Please include the fields you are "
- "ordering on in your select() call."
- )
- v1 = doc1._data[orderBy.field.field_path]
- v2 = doc2._data[orderBy.field.field_path]
- encoded_v1 = _helpers.encode_value(v1)
- encoded_v2 = _helpers.encode_value(v2)
- comp = Order().compare(encoded_v1, encoded_v2)
-
- if comp != 0:
- # 1 == Ascending, -1 == Descending
- return orderBy.direction * comp
-
- return 0
-
-
-def _enum_from_op_string(op_string):
- """Convert a string representation of a binary operator to an enum.
-
- These enums come from the protobuf message definition
- ``StructuredQuery.FieldFilter.Operator``.
-
- Args:
- op_string (str): A comparison operation in the form of a string.
- Acceptable values are ``<``, ``<=``, ``==``, ``>=``
- and ``>``.
-
- Returns:
- int: The enum corresponding to ``op_string``.
-
- Raises:
- ValueError: If ``op_string`` is not a valid operator.
- """
- try:
- return _COMPARISON_OPERATORS[op_string]
- except KeyError:
- choices = ", ".join(sorted(_COMPARISON_OPERATORS.keys()))
- msg = _BAD_OP_STRING.format(op_string, choices)
- raise ValueError(msg)
-
-
-def _isnan(value):
- """Check if a value is NaN.
-
- This differs from ``math.isnan`` in that **any** input type is
- allowed.
-
- Args:
- value (Any): A value to check for NaN-ness.
-
- Returns:
- bool: Indicates if the value is the NaN float.
- """
- if isinstance(value, float):
- return math.isnan(value)
- else:
- return False
-
-
-def _enum_from_direction(direction):
- """Convert a string representation of a direction to an enum.
-
- Args:
- direction (str): A direction to order by. Must be one of
- :attr:`~google.cloud.firestore.Query.ASCENDING` or
- :attr:`~google.cloud.firestore.Query.DESCENDING`.
-
- Returns:
- int: The enum corresponding to ``direction``.
-
- Raises:
- ValueError: If ``direction`` is not a valid direction.
- """
- if isinstance(direction, int):
- return direction
-
- if direction == Query.ASCENDING:
- return enums.StructuredQuery.Direction.ASCENDING
- elif direction == Query.DESCENDING:
- return enums.StructuredQuery.Direction.DESCENDING
- else:
- msg = _BAD_DIR_STRING.format(direction, Query.ASCENDING, Query.DESCENDING)
- raise ValueError(msg)
-
-
-def _filter_pb(field_or_unary):
- """Convert a specific protobuf filter to the generic filter type.
-
- Args:
- field_or_unary (Union[google.cloud.proto.firestore.v1beta1.\
- query_pb2.StructuredQuery.FieldFilter, google.cloud.proto.\
- firestore.v1beta1.query_pb2.StructuredQuery.FieldFilter]): A
- field or unary filter to convert to a generic filter.
-
- Returns:
- google.cloud.firestore_v1beta1.types.\
- StructuredQuery.Filter: A "generic" filter.
-
- Raises:
- ValueError: If ``field_or_unary`` is not a field or unary filter.
- """
- if isinstance(field_or_unary, query_pb2.StructuredQuery.FieldFilter):
- return query_pb2.StructuredQuery.Filter(field_filter=field_or_unary)
- elif isinstance(field_or_unary, query_pb2.StructuredQuery.UnaryFilter):
- return query_pb2.StructuredQuery.Filter(unary_filter=field_or_unary)
- else:
- raise ValueError("Unexpected filter type", type(field_or_unary), field_or_unary)
-
-
-def _cursor_pb(cursor_pair):
- """Convert a cursor pair to a protobuf.
-
- If ``cursor_pair`` is :data:`None`, just returns :data:`None`.
-
- Args:
- cursor_pair (Optional[Tuple[list, bool]]): Two-tuple of
-
- * a list of field values.
- * a ``before`` flag
-
- Returns:
- Optional[google.cloud.firestore_v1beta1.types.Cursor]: A
- protobuf cursor corresponding to the values.
- """
- if cursor_pair is not None:
- data, before = cursor_pair
- value_pbs = [_helpers.encode_value(value) for value in data]
- return query_pb2.Cursor(values=value_pbs, before=before)
-
-
-def _query_response_to_snapshot(response_pb, collection, expected_prefix):
- """Parse a query response protobuf to a document snapshot.
-
- Args:
- response_pb (google.cloud.proto.firestore.v1beta1.\
- firestore_pb2.RunQueryResponse): A
- collection (~.firestore_v1beta1.collection.CollectionReference): A
- reference to the collection that initiated the query.
- expected_prefix (str): The expected prefix for fully-qualified
- document names returned in the query results. This can be computed
- directly from ``collection`` via :meth:`_parent_info`.
-
- Returns:
- Optional[~.firestore.document.DocumentSnapshot]: A
- snapshot of the data returned in the query. If ``response_pb.document``
- is not set, the snapshot will be :data:`None`.
- """
- if not response_pb.HasField("document"):
- return None
-
- document_id = _helpers.get_doc_id(response_pb.document, expected_prefix)
- reference = collection.document(document_id)
- data = _helpers.decode_dict(response_pb.document.fields, collection._client)
- snapshot = document.DocumentSnapshot(
- reference,
- data,
- exists=True,
- read_time=response_pb.read_time,
- create_time=response_pb.document.create_time,
- update_time=response_pb.document.update_time,
- )
- return snapshot
diff --git a/google/cloud/firestore_v1beta1/transaction.py b/google/cloud/firestore_v1beta1/transaction.py
deleted file mode 100644
index 9a37f18d80..0000000000
--- a/google/cloud/firestore_v1beta1/transaction.py
+++ /dev/null
@@ -1,409 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for applying Google Cloud Firestore changes in a transaction."""
-
-
-import random
-import time
-
-import six
-
-from google.api_core import exceptions
-from google.cloud.firestore_v1beta1 import batch
-from google.cloud.firestore_v1beta1 import types
-
-
-MAX_ATTEMPTS = 5
-"""int: Default number of transaction attempts (with retries)."""
-_CANT_BEGIN = "The transaction has already begun. Current transaction ID: {!r}."
-_MISSING_ID_TEMPLATE = "The transaction has no transaction ID, so it cannot be {}."
-_CANT_ROLLBACK = _MISSING_ID_TEMPLATE.format("rolled back")
-_CANT_COMMIT = _MISSING_ID_TEMPLATE.format("committed")
-_WRITE_READ_ONLY = "Cannot perform write operation in read-only transaction."
-_INITIAL_SLEEP = 1.0
-"""float: Initial "max" for sleep interval. To be used in :func:`_sleep`."""
-_MAX_SLEEP = 30.0
-"""float: Eventual "max" sleep time. To be used in :func:`_sleep`."""
-_MULTIPLIER = 2.0
-"""float: Multiplier for exponential backoff. To be used in :func:`_sleep`."""
-_EXCEED_ATTEMPTS_TEMPLATE = "Failed to commit transaction in {:d} attempts."
-_CANT_RETRY_READ_ONLY = "Only read-write transactions can be retried."
-
-
-class Transaction(batch.WriteBatch):
- """Accumulate read-and-write operations to be sent in a transaction.
-
- Args:
- client (~.firestore_v1beta1.client.Client): The client that
- created this transaction.
- max_attempts (Optional[int]): The maximum number of attempts for
- the transaction (i.e. allowing retries). Defaults to
- :attr:`~google.cloud.firestore_v1beta1.transaction.MAX_ATTEMPTS`.
- read_only (Optional[bool]): Flag indicating if the transaction
- should be read-only or should allow writes. Defaults to
- :data:`False`.
- """
-
- def __init__(self, client, max_attempts=MAX_ATTEMPTS, read_only=False):
- super(Transaction, self).__init__(client)
- self._max_attempts = max_attempts
- self._read_only = read_only
- self._id = None
-
- def _add_write_pbs(self, write_pbs):
- """Add `Write`` protobufs to this transaction.
-
- Args:
- write_pbs (List[google.cloud.proto.firestore.v1beta1.\
- write_pb2.Write]): A list of write protobufs to be added.
-
- Raises:
- ValueError: If this transaction is read-only.
- """
- if self._read_only:
- raise ValueError(_WRITE_READ_ONLY)
-
- super(Transaction, self)._add_write_pbs(write_pbs)
-
- def _options_protobuf(self, retry_id):
- """Convert the current object to protobuf.
-
- The ``retry_id`` value is used when retrying a transaction that
- failed (e.g. due to contention). It is intended to be the "first"
- transaction that failed (i.e. if multiple retries are needed).
-
- Args:
- retry_id (Union[bytes, NoneType]): Transaction ID of a transaction
- to be retried.
-
- Returns:
- Optional[google.cloud.firestore_v1beta1.types.TransactionOptions]:
- The protobuf ``TransactionOptions`` if ``read_only==True`` or if
- there is a transaction ID to be retried, else :data:`None`.
-
- Raises:
- ValueError: If ``retry_id`` is not :data:`None` but the
- transaction is read-only.
- """
- if retry_id is not None:
- if self._read_only:
- raise ValueError(_CANT_RETRY_READ_ONLY)
-
- return types.TransactionOptions(
- read_write=types.TransactionOptions.ReadWrite(
- retry_transaction=retry_id
- )
- )
- elif self._read_only:
- return types.TransactionOptions(
- read_only=types.TransactionOptions.ReadOnly()
- )
- else:
- return None
-
- @property
- def in_progress(self):
- """Determine if this transaction has already begun.
-
- Returns:
- bool: Indicates if the transaction has started.
- """
- return self._id is not None
-
- @property
- def id(self):
- """Get the current transaction ID.
-
- Returns:
- Optional[bytes]: The transaction ID (or :data:`None` if the
- current transaction is not in progress).
- """
- return self._id
-
- def _begin(self, retry_id=None):
- """Begin the transaction.
-
- Args:
- retry_id (Optional[bytes]): Transaction ID of a transaction to be
- retried.
-
- Raises:
- ValueError: If the current transaction has already begun.
- """
- if self.in_progress:
- msg = _CANT_BEGIN.format(self._id)
- raise ValueError(msg)
-
- transaction_response = self._client._firestore_api.begin_transaction(
- self._client._database_string,
- options_=self._options_protobuf(retry_id),
- metadata=self._client._rpc_metadata,
- )
- self._id = transaction_response.transaction
-
- def _clean_up(self):
- """Clean up the instance after :meth:`_rollback`` or :meth:`_commit``.
-
- This intended to occur on success or failure of the associated RPCs.
- """
- self._write_pbs = []
- self._id = None
-
- def _rollback(self):
- """Roll back the transaction.
-
- Raises:
- ValueError: If no transaction is in progress.
- """
- if not self.in_progress:
- raise ValueError(_CANT_ROLLBACK)
-
- try:
- # NOTE: The response is just ``google.protobuf.Empty``.
- self._client._firestore_api.rollback(
- self._client._database_string,
- self._id,
- metadata=self._client._rpc_metadata,
- )
- finally:
- self._clean_up()
-
- def _commit(self):
- """Transactionally commit the changes accumulated.
-
- Returns:
- List[google.cloud.proto.firestore.v1beta1.\
- write_pb2.WriteResult, ...]: The write results corresponding
- to the changes committed, returned in the same order as the
- changes were applied to this transaction. A write result contains
- an ``update_time`` field.
-
- Raises:
- ValueError: If no transaction is in progress.
- """
- if not self.in_progress:
- raise ValueError(_CANT_COMMIT)
-
- commit_response = _commit_with_retry(self._client, self._write_pbs, self._id)
-
- self._clean_up()
- return list(commit_response.write_results)
-
-
-class _Transactional(object):
- """Provide a callable object to use as a transactional decorater.
-
- This is surfaced via
- :func:`~google.cloud.firestore_v1beta1.transaction.transactional`.
-
- Args:
- to_wrap (Callable[~.firestore_v1beta1.transaction.Transaction, \
- Any]): A callable that should be run (and retried) in a
- transaction.
- """
-
- def __init__(self, to_wrap):
- self.to_wrap = to_wrap
- self.current_id = None
- """Optional[bytes]: The current transaction ID."""
- self.retry_id = None
- """Optional[bytes]: The ID of the first attempted transaction."""
-
- def _reset(self):
- """Unset the transaction IDs."""
- self.current_id = None
- self.retry_id = None
-
- def _pre_commit(self, transaction, *args, **kwargs):
- """Begin transaction and call the wrapped callable.
-
- If the callable raises an exception, the transaction will be rolled
- back. If not, the transaction will be "ready" for ``Commit`` (i.e.
- it will have staged writes).
-
- Args:
- transaction (~.firestore_v1beta1.transaction.Transaction): A
- transaction to execute the callable within.
- args (Tuple[Any, ...]): The extra positional arguments to pass
- along to the wrapped callable.
- kwargs (Dict[str, Any]): The extra keyword arguments to pass
- along to the wrapped callable.
-
- Returns:
- Any: result of the wrapped callable.
-
- Raises:
- Exception: Any failure caused by ``to_wrap``.
- """
- # Force the ``transaction`` to be not "in progress".
- transaction._clean_up()
- transaction._begin(retry_id=self.retry_id)
-
- # Update the stored transaction IDs.
- self.current_id = transaction._id
- if self.retry_id is None:
- self.retry_id = self.current_id
- try:
- return self.to_wrap(transaction, *args, **kwargs)
- except: # noqa
- # NOTE: If ``rollback`` fails this will lose the information
- # from the original failure.
- transaction._rollback()
- raise
-
- def _maybe_commit(self, transaction):
- """Try to commit the transaction.
-
- If the transaction is read-write and the ``Commit`` fails with the
- ``ABORTED`` status code, it will be retried. Any other failure will
- not be caught.
-
- Args:
- transaction (~.firestore_v1beta1.transaction.Transaction): The
- transaction to be ``Commit``-ed.
-
- Returns:
- bool: Indicating if the commit succeeded.
- """
- try:
- transaction._commit()
- return True
- except exceptions.GoogleAPICallError as exc:
- if transaction._read_only:
- raise
-
- if isinstance(exc, exceptions.Aborted):
- # If a read-write transaction returns ABORTED, retry.
- return False
- else:
- raise
-
- def __call__(self, transaction, *args, **kwargs):
- """Execute the wrapped callable within a transaction.
-
- Args:
- transaction (~.firestore_v1beta1.transaction.Transaction): A
- transaction to execute the callable within.
- args (Tuple[Any, ...]): The extra positional arguments to pass
- along to the wrapped callable.
- kwargs (Dict[str, Any]): The extra keyword arguments to pass
- along to the wrapped callable.
-
- Returns:
- Any: The result of the wrapped callable.
-
- Raises:
- ValueError: If the transaction does not succeed in
- ``max_attempts``.
- """
- self._reset()
-
- for attempt in six.moves.xrange(transaction._max_attempts):
- result = self._pre_commit(transaction, *args, **kwargs)
- succeeded = self._maybe_commit(transaction)
- if succeeded:
- return result
-
- # Subsequent requests will use the failed transaction ID as part of
- # the ``BeginTransactionRequest`` when restarting this transaction
- # (via ``options.retry_transaction``). This preserves the "spot in
- # line" of the transaction, so exponential backoff is not required
- # in this case.
-
- transaction._rollback()
- msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts)
- raise ValueError(msg)
-
-
-def transactional(to_wrap):
- """Decorate a callable so that it runs in a transaction.
-
- Args:
- to_wrap (Callable[~.firestore_v1beta1.transaction.Transaction, \
- Any]): A callable that should be run (and retried) in a
- transaction.
-
- Returns:
- Callable[~.firestore_v1beta1.transaction.Transaction, Any]: the
- wrapped callable.
- """
- return _Transactional(to_wrap)
-
-
-def _commit_with_retry(client, write_pbs, transaction_id):
- """Call ``Commit`` on the GAPIC client with retry / sleep.
-
- Retries the ``Commit`` RPC on Unavailable. Usually this RPC-level
- retry is handled by the underlying GAPICd client, but in this case it
- doesn't because ``Commit`` is not always idempotent. But here we know it
- is "idempotent"-like because it has a transaction ID. We also need to do
- our own retry to special-case the ``INVALID_ARGUMENT`` error.
-
- Args:
- client (~.firestore_v1beta1.client.Client): A client with
- GAPIC client and configuration details.
- write_pbs (List[google.cloud.proto.firestore.v1beta1.\
- write_pb2.Write, ...]): A ``Write`` protobuf instance to
- be committed.
- transaction_id (bytes): ID of an existing transaction that
- this commit will run in.
-
- Returns:
- google.cloud.firestore_v1beta1.types.CommitResponse:
- The protobuf response from ``Commit``.
-
- Raises:
- ~google.api_core.exceptions.GoogleAPICallError: If a non-retryable
- exception is encountered.
- """
- current_sleep = _INITIAL_SLEEP
- while True:
- try:
- return client._firestore_api.commit(
- client._database_string,
- write_pbs,
- transaction=transaction_id,
- metadata=client._rpc_metadata,
- )
- except exceptions.ServiceUnavailable:
- # Retry
- pass
-
- current_sleep = _sleep(current_sleep)
-
-
-def _sleep(current_sleep, max_sleep=_MAX_SLEEP, multiplier=_MULTIPLIER):
- """Sleep and produce a new sleep time.
-
- .. _Exponential Backoff And Jitter: https://www.awsarchitectureblog.com/\
- 2015/03/backoff.html
-
- Select a duration between zero and ``current_sleep``. It might seem
- counterintuitive to have so much jitter, but
- `Exponential Backoff And Jitter`_ argues that "full jitter" is
- the best strategy.
-
- Args:
- current_sleep (float): The current "max" for sleep interval.
- max_sleep (Optional[float]): Eventual "max" sleep time
- multiplier (Optional[float]): Multiplier for exponential backoff.
-
- Returns:
- float: Newly doubled ``current_sleep`` or ``max_sleep`` (whichever
- is smaller)
- """
- actual_sleep = random.uniform(0.0, current_sleep)
- time.sleep(actual_sleep)
- return min(multiplier * current_sleep, max_sleep)
diff --git a/google/cloud/firestore_v1beta1/transforms.py b/google/cloud/firestore_v1beta1/transforms.py
deleted file mode 100644
index 4a64cf9ec3..0000000000
--- a/google/cloud/firestore_v1beta1/transforms.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpful constants to use for Google Cloud Firestore."""
-
-
-class Sentinel(object):
- """Sentinel objects used to signal special handling."""
-
- __slots__ = ("description",)
-
- def __init__(self, description):
- self.description = description
-
- def __repr__(self):
- return "Sentinel: {}".format(self.description)
-
-
-DELETE_FIELD = Sentinel("Value used to delete a field in a document.")
-
-
-SERVER_TIMESTAMP = Sentinel(
- "Value used to set a document field to the server timestamp."
-)
-
-
-class _ValueList(object):
- """Read-only list of values.
-
- Args:
- values (List | Tuple): values held in the helper.
- """
-
- slots = ("_values",)
-
- def __init__(self, values):
- if not isinstance(values, (list, tuple)):
- raise ValueError("'values' must be a list or tuple.")
-
- if len(values) == 0:
- raise ValueError("'values' must be non-empty.")
-
- self._values = list(values)
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return NotImplemented
- return self._values == other._values
-
- @property
- def values(self):
- """Values to append.
-
- Returns (List):
- values to be appended by the transform.
- """
- return self._values
-
-
-class ArrayUnion(_ValueList):
- """Field transform: appends missing values to an array field.
-
- See:
- https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1beta1#google.firestore.v1beta1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1beta1.ArrayValue.google.firestore.v1beta1.DocumentTransform.FieldTransform.append_missing_elements
-
- Args:
- values (List | Tuple): values to append.
- """
-
-
-class ArrayRemove(_ValueList):
- """Field transform: remove values from an array field.
-
- See:
- https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1beta1#google.firestore.v1beta1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1beta1.ArrayValue.google.firestore.v1beta1.DocumentTransform.FieldTransform.remove_all_from_array
-
- Args:
- values (List | Tuple): values to remove.
- """
diff --git a/google/cloud/firestore_v1beta1/types.py b/google/cloud/firestore_v1beta1/types.py
deleted file mode 100644
index 90c03b8aba..0000000000
--- a/google/cloud/firestore_v1beta1/types.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-import sys
-
-from google.api import http_pb2
-from google.protobuf import any_pb2
-from google.protobuf import descriptor_pb2
-from google.protobuf import empty_pb2
-from google.protobuf import struct_pb2
-from google.protobuf import timestamp_pb2
-from google.protobuf import wrappers_pb2
-from google.rpc import status_pb2
-from google.type import latlng_pb2
-
-from google.api_core.protobuf_helpers import get_messages
-from google.cloud.firestore_v1beta1.proto import common_pb2
-from google.cloud.firestore_v1beta1.proto import document_pb2
-from google.cloud.firestore_v1beta1.proto import firestore_pb2
-from google.cloud.firestore_v1beta1.proto import query_pb2
-from google.cloud.firestore_v1beta1.proto import write_pb2
-
-
-_shared_modules = [
- http_pb2,
- any_pb2,
- descriptor_pb2,
- empty_pb2,
- struct_pb2,
- timestamp_pb2,
- wrappers_pb2,
- status_pb2,
- latlng_pb2,
-]
-
-_local_modules = [common_pb2, document_pb2, firestore_pb2, query_pb2, write_pb2]
-
-names = []
-
-for module in _shared_modules:
- for name, message in get_messages(module).items():
- setattr(sys.modules[__name__], name, message)
- names.append(name)
-
-for module in _local_modules:
- for name, message in get_messages(module).items():
- message.__module__ = "google.cloud.firestore_v1beta1.types"
- setattr(sys.modules[__name__], name, message)
- names.append(name)
-
-__all__ = tuple(sorted(names))
diff --git a/google/cloud/firestore_v1beta1/watch.py b/google/cloud/firestore_v1beta1/watch.py
deleted file mode 100644
index 63ded0d2d2..0000000000
--- a/google/cloud/firestore_v1beta1/watch.py
+++ /dev/null
@@ -1,722 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import collections
-import threading
-import datetime
-from enum import Enum
-import functools
-
-import pytz
-
-from google.api_core.bidi import ResumableBidiRpc
-from google.api_core.bidi import BackgroundConsumer
-from google.cloud.firestore_v1beta1.proto import firestore_pb2
-from google.cloud.firestore_v1beta1 import _helpers
-
-from google.api_core import exceptions
-
-import grpc
-
-"""Python client for Google Cloud Firestore Watch."""
-
-_LOGGER = logging.getLogger(__name__)
-
-WATCH_TARGET_ID = 0x5079 # "Py"
-
-GRPC_STATUS_CODE = {
- "OK": 0,
- "CANCELLED": 1,
- "UNKNOWN": 2,
- "INVALID_ARGUMENT": 3,
- "DEADLINE_EXCEEDED": 4,
- "NOT_FOUND": 5,
- "ALREADY_EXISTS": 6,
- "PERMISSION_DENIED": 7,
- "UNAUTHENTICATED": 16,
- "RESOURCE_EXHAUSTED": 8,
- "FAILED_PRECONDITION": 9,
- "ABORTED": 10,
- "OUT_OF_RANGE": 11,
- "UNIMPLEMENTED": 12,
- "INTERNAL": 13,
- "UNAVAILABLE": 14,
- "DATA_LOSS": 15,
- "DO_NOT_USE": -1,
-}
-_RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated"
-_RETRYABLE_STREAM_ERRORS = (
- exceptions.DeadlineExceeded,
- exceptions.ServiceUnavailable,
- exceptions.InternalServerError,
- exceptions.Unknown,
- exceptions.GatewayTimeout,
-)
-
-DocTreeEntry = collections.namedtuple("DocTreeEntry", ["value", "index"])
-
-
-class WatchDocTree(object):
- # TODO: Currently this uses a dict. Other implementations us an rbtree.
- # The performance of this implementation should be investigated and may
- # require modifying the underlying datastructure to a rbtree.
- def __init__(self):
- self._dict = {}
- self._index = 0
-
- def keys(self):
- return list(self._dict.keys())
-
- def _copy(self):
- wdt = WatchDocTree()
- wdt._dict = self._dict.copy()
- wdt._index = self._index
- self = wdt
- return self
-
- def insert(self, key, value):
- self = self._copy()
- self._dict[key] = DocTreeEntry(value, self._index)
- self._index += 1
- return self
-
- def find(self, key):
- return self._dict[key]
-
- def remove(self, key):
- self = self._copy()
- del self._dict[key]
- return self
-
- def __iter__(self):
- for k in self._dict:
- yield k
-
- def __len__(self):
- return len(self._dict)
-
- def __contains__(self, k):
- return k in self._dict
-
-
-class ChangeType(Enum):
- ADDED = 1
- REMOVED = 2
- MODIFIED = 3
-
-
-class DocumentChange(object):
- def __init__(self, type, document, old_index, new_index):
- """DocumentChange
-
- Args:
- type (ChangeType):
- document (document.DocumentSnapshot):
- old_index (int):
- new_index (int):
- """
- # TODO: spec indicated an isEqual param also
- self.type = type
- self.document = document
- self.old_index = old_index
- self.new_index = new_index
-
-
-class WatchResult(object):
- def __init__(self, snapshot, name, change_type):
- self.snapshot = snapshot
- self.name = name
- self.change_type = change_type
-
-
-def _maybe_wrap_exception(exception):
- """Wraps a gRPC exception class, if needed."""
- if isinstance(exception, grpc.RpcError):
- return exceptions.from_grpc_error(exception)
- return exception
-
-
-def document_watch_comparator(doc1, doc2):
- assert doc1 == doc2, "Document watches only support one document."
- return 0
-
-
-class Watch(object):
-
- BackgroundConsumer = BackgroundConsumer # FBO unit tests
- ResumableBidiRpc = ResumableBidiRpc # FBO unit tests
-
- def __init__(
- self,
- document_reference,
- firestore,
- target,
- comparator,
- snapshot_callback,
- document_snapshot_cls,
- document_reference_cls,
- BackgroundConsumer=None, # FBO unit testing
- ResumableBidiRpc=None, # FBO unit testing
- ):
- """
- Args:
- firestore:
- target:
- comparator:
- snapshot_callback: Callback method to process snapshots.
- Args:
- docs (List(DocumentSnapshot)): A callback that returns the
- ordered list of documents stored in this snapshot.
- changes (List(str)): A callback that returns the list of
- changed documents since the last snapshot delivered for
- this watch.
- read_time (string): The ISO 8601 time at which this
- snapshot was obtained.
-
- document_snapshot_cls: instance of DocumentSnapshot
- document_reference_cls: instance of DocumentReference
- """
- self._document_reference = document_reference
- self._firestore = firestore
- self._api = firestore._firestore_api
- self._targets = target
- self._comparator = comparator
- self.DocumentSnapshot = document_snapshot_cls
- self.DocumentReference = document_reference_cls
- self._snapshot_callback = snapshot_callback
- self._closing = threading.Lock()
- self._closed = False
-
- def should_recover(exc): # pragma: NO COVER
- return (
- isinstance(exc, grpc.RpcError)
- and exc.code() == grpc.StatusCode.UNAVAILABLE
- )
-
- initial_request = firestore_pb2.ListenRequest(
- database=self._firestore._database_string, add_target=self._targets
- )
-
- if ResumableBidiRpc is None:
- ResumableBidiRpc = self.ResumableBidiRpc # FBO unit tests
-
- self._rpc = ResumableBidiRpc(
- self._api.transport.listen,
- initial_request=initial_request,
- should_recover=should_recover,
- metadata=self._firestore._rpc_metadata,
- )
-
- self._rpc.add_done_callback(self._on_rpc_done)
-
- # Initialize state for on_snapshot
- # The sorted tree of QueryDocumentSnapshots as sent in the last
- # snapshot. We only look at the keys.
- self.doc_tree = WatchDocTree()
-
- # A map of document names to QueryDocumentSnapshots for the last sent
- # snapshot.
- self.doc_map = {}
-
- # The accumulates map of document changes (keyed by document name) for
- # the current snapshot.
- self.change_map = {}
-
- # The current state of the query results.
- self.current = False
-
- # We need this to track whether we've pushed an initial set of changes,
- # since we should push those even when there are no changes, if there
- # aren't docs.
- self.has_pushed = False
-
- # The server assigns and updates the resume token.
- self.resume_token = None
- if BackgroundConsumer is None: # FBO unit tests
- BackgroundConsumer = self.BackgroundConsumer
-
- self._consumer = BackgroundConsumer(self._rpc, self.on_snapshot)
- self._consumer.start()
-
- @property
- def is_active(self):
- """bool: True if this manager is actively streaming.
-
- Note that ``False`` does not indicate this is complete shut down,
- just that it stopped getting new messages.
- """
- return self._consumer is not None and self._consumer.is_active
-
- def close(self, reason=None):
- """Stop consuming messages and shutdown all helper threads.
-
- This method is idempotent. Additional calls will have no effect.
-
- Args:
- reason (Any): The reason to close this. If None, this is considered
- an "intentional" shutdown.
- """
- with self._closing:
- if self._closed:
- return
-
- # Stop consuming messages.
- if self.is_active:
- _LOGGER.debug("Stopping consumer.")
- self._consumer.stop()
- self._consumer = None
-
- self._rpc.close()
- self._rpc = None
- self._closed = True
- _LOGGER.debug("Finished stopping manager.")
-
- if reason:
- # Raise an exception if a reason is provided
- _LOGGER.debug("reason for closing: %s" % reason)
- if isinstance(reason, Exception):
- raise reason
- raise RuntimeError(reason)
-
- def _on_rpc_done(self, future):
- """Triggered whenever the underlying RPC terminates without recovery.
-
- This is typically triggered from one of two threads: the background
- consumer thread (when calling ``recv()`` produces a non-recoverable
- error) or the grpc management thread (when cancelling the RPC).
-
- This method is *non-blocking*. It will start another thread to deal
- with shutting everything down. This is to prevent blocking in the
- background consumer and preventing it from being ``joined()``.
- """
- _LOGGER.info("RPC termination has signaled manager shutdown.")
- future = _maybe_wrap_exception(future)
- thread = threading.Thread(
- name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future}
- )
- thread.daemon = True
- thread.start()
-
- def unsubscribe(self):
- self.close()
-
- @classmethod
- def for_document(
- cls,
- document_ref,
- snapshot_callback,
- snapshot_class_instance,
- reference_class_instance,
- ):
- """
- Creates a watch snapshot listener for a document. snapshot_callback
- receives a DocumentChange object, but may also start to get
- targetChange and such soon
-
- Args:
- document_ref: Reference to Document
- snapshot_callback: callback to be called on snapshot
- snapshot_class_instance: instance of DocumentSnapshot to make
- snapshots with to pass to snapshot_callback
- reference_class_instance: instance of DocumentReference to make
- references
-
- """
- return cls(
- document_ref,
- document_ref._client,
- {
- "documents": {"documents": [document_ref._document_path]},
- "target_id": WATCH_TARGET_ID,
- },
- document_watch_comparator,
- snapshot_callback,
- snapshot_class_instance,
- reference_class_instance,
- )
-
- @classmethod
- def for_query(
- cls, query, snapshot_callback, snapshot_class_instance, reference_class_instance
- ):
- query_target = firestore_pb2.Target.QueryTarget(
- parent=query._client._database_string, structured_query=query._to_protobuf()
- )
-
- return cls(
- query,
- query._client,
- {"query": query_target, "target_id": WATCH_TARGET_ID},
- query._comparator,
- snapshot_callback,
- snapshot_class_instance,
- reference_class_instance,
- )
-
- def _on_snapshot_target_change_no_change(self, proto):
- _LOGGER.debug("on_snapshot: target change: NO_CHANGE")
- change = proto.target_change
-
- no_target_ids = change.target_ids is None or len(change.target_ids) == 0
- if no_target_ids and change.read_time and self.current:
- # TargetChange.CURRENT followed by TargetChange.NO_CHANGE
- # signals a consistent state. Invoke the onSnapshot
- # callback as specified by the user.
- self.push(change.read_time, change.resume_token)
-
- def _on_snapshot_target_change_add(self, proto):
- _LOGGER.debug("on_snapshot: target change: ADD")
- target_id = proto.target_change.target_ids[0]
- if target_id != WATCH_TARGET_ID:
- raise RuntimeError("Unexpected target ID %s sent by server" % target_id)
-
- def _on_snapshot_target_change_remove(self, proto):
- _LOGGER.debug("on_snapshot: target change: REMOVE")
- change = proto.target_change
-
- code = 13
- message = "internal error"
- if change.cause:
- code = change.cause.code
- message = change.cause.message
-
- message = "Error %s: %s" % (code, message)
-
- raise RuntimeError(message)
-
- def _on_snapshot_target_change_reset(self, proto):
- # Whatever changes have happened so far no longer matter.
- _LOGGER.debug("on_snapshot: target change: RESET")
- self._reset_docs()
-
- def _on_snapshot_target_change_current(self, proto):
- _LOGGER.debug("on_snapshot: target change: CURRENT")
- self.current = True
-
- def on_snapshot(self, proto):
- """
- Called everytime there is a response from listen. Collect changes
- and 'push' the changes in a batch to the customer when we receive
- 'current' from the listen response.
-
- Args:
- listen_response(`google.cloud.firestore_v1beta1.types.ListenResponse`):
- Callback method that receives a object to
- """
- TargetChange = firestore_pb2.TargetChange
-
- target_changetype_dispatch = {
- TargetChange.NO_CHANGE: self._on_snapshot_target_change_no_change,
- TargetChange.ADD: self._on_snapshot_target_change_add,
- TargetChange.REMOVE: self._on_snapshot_target_change_remove,
- TargetChange.RESET: self._on_snapshot_target_change_reset,
- TargetChange.CURRENT: self._on_snapshot_target_change_current,
- }
-
- target_change = proto.target_change
- if str(target_change):
- target_change_type = target_change.target_change_type
- _LOGGER.debug("on_snapshot: target change: " + str(target_change_type))
- meth = target_changetype_dispatch.get(target_change_type)
- if meth is None:
- _LOGGER.info(
- "on_snapshot: Unknown target change " + str(target_change_type)
- )
- self.close(
- reason="Unknown target change type: %s " % str(target_change_type)
- )
- else:
- try:
- meth(proto)
- except Exception as exc2:
- _LOGGER.debug("meth(proto) exc: " + str(exc2))
- raise
-
- # NOTE:
- # in other implementations, such as node, the backoff is reset here
- # in this version bidi rpc is just used and will control this.
-
- elif str(proto.document_change):
- _LOGGER.debug("on_snapshot: document change")
-
- # No other target_ids can show up here, but we still need to see
- # if the targetId was in the added list or removed list.
- target_ids = proto.document_change.target_ids or []
- removed_target_ids = proto.document_change.removed_target_ids or []
- changed = False
- removed = False
-
- if WATCH_TARGET_ID in target_ids:
- changed = True
-
- if WATCH_TARGET_ID in removed_target_ids:
- removed = True
-
- if changed:
- _LOGGER.debug("on_snapshot: document change: CHANGED")
-
- # google.cloud.firestore_v1beta1.types.DocumentChange
- document_change = proto.document_change
- # google.cloud.firestore_v1beta1.types.Document
- document = document_change.document
-
- data = _helpers.decode_dict(document.fields, self._firestore)
-
- # Create a snapshot. As Document and Query objects can be
- # passed we need to get a Document Reference in a more manual
- # fashion than self._document_reference
- document_name = document.name
- db_str = self._firestore._database_string
- db_str_documents = db_str + "/documents/"
- if document_name.startswith(db_str_documents):
- document_name = document_name[len(db_str_documents) :]
-
- document_ref = self._firestore.document(document_name)
-
- snapshot = self.DocumentSnapshot(
- reference=document_ref,
- data=data,
- exists=True,
- read_time=None,
- create_time=document.create_time,
- update_time=document.update_time,
- )
- self.change_map[document.name] = snapshot
-
- elif removed:
- _LOGGER.debug("on_snapshot: document change: REMOVED")
- document = proto.document_change.document
- self.change_map[document.name] = ChangeType.REMOVED
-
- # NB: document_delete and document_remove (as far as we, the client,
- # are concerned) are functionally equivalent
-
- elif str(proto.document_delete):
- _LOGGER.debug("on_snapshot: document change: DELETE")
- name = proto.document_delete.document
- self.change_map[name] = ChangeType.REMOVED
-
- elif str(proto.document_remove):
- _LOGGER.debug("on_snapshot: document change: REMOVE")
- name = proto.document_remove.document
- self.change_map[name] = ChangeType.REMOVED
-
- elif proto.filter:
- _LOGGER.debug("on_snapshot: filter update")
- if proto.filter.count != self._current_size():
- # We need to remove all the current results.
- self._reset_docs()
- # The filter didn't match, so re-issue the query.
- # TODO: reset stream method?
- # self._reset_stream();
-
- else:
- _LOGGER.debug("UNKNOWN TYPE. UHOH")
- self.close(reason=ValueError("Unknown listen response type: %s" % proto))
-
- def push(self, read_time, next_resume_token):
- """
- Assembles a new snapshot from the current set of changes and invokes
- the user's callback. Clears the current changes on completion.
- """
- deletes, adds, updates = Watch._extract_changes(
- self.doc_map, self.change_map, read_time
- )
-
- updated_tree, updated_map, appliedChanges = self._compute_snapshot(
- self.doc_tree, self.doc_map, deletes, adds, updates
- )
-
- if not self.has_pushed or len(appliedChanges):
- # TODO: It is possible in the future we will have the tree order
- # on insert. For now, we sort here.
- key = functools.cmp_to_key(self._comparator)
- keys = sorted(updated_tree.keys(), key=key)
-
- self._snapshot_callback(
- keys,
- appliedChanges,
- datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc),
- )
- self.has_pushed = True
-
- self.doc_tree = updated_tree
- self.doc_map = updated_map
- self.change_map.clear()
- self.resume_token = next_resume_token
-
- @staticmethod
- def _extract_changes(doc_map, changes, read_time):
- deletes = []
- adds = []
- updates = []
-
- for name, value in changes.items():
- if value == ChangeType.REMOVED:
- if name in doc_map:
- deletes.append(name)
- elif name in doc_map:
- if read_time is not None:
- value.read_time = read_time
- updates.append(value)
- else:
- if read_time is not None:
- value.read_time = read_time
- adds.append(value)
-
- return (deletes, adds, updates)
-
- def _compute_snapshot(
- self, doc_tree, doc_map, delete_changes, add_changes, update_changes
- ):
- updated_tree = doc_tree
- updated_map = doc_map
-
- assert len(doc_tree) == len(doc_map), (
- "The document tree and document map should have the same "
- + "number of entries."
- )
-
- def delete_doc(name, updated_tree, updated_map):
- """
- Applies a document delete to the document tree and document map.
- Returns the corresponding DocumentChange event.
- """
- assert name in updated_map, "Document to delete does not exist"
- old_document = updated_map.get(name)
- # TODO: If a document doesn't exist this raises IndexError. Handle?
- existing = updated_tree.find(old_document)
- old_index = existing.index
- updated_tree = updated_tree.remove(old_document)
- del updated_map[name]
- return (
- DocumentChange(ChangeType.REMOVED, old_document, old_index, -1),
- updated_tree,
- updated_map,
- )
-
- def add_doc(new_document, updated_tree, updated_map):
- """
- Applies a document add to the document tree and the document map.
- Returns the corresponding DocumentChange event.
- """
- name = new_document.reference._document_path
- assert name not in updated_map, "Document to add already exists"
- updated_tree = updated_tree.insert(new_document, None)
- new_index = updated_tree.find(new_document).index
- updated_map[name] = new_document
- return (
- DocumentChange(ChangeType.ADDED, new_document, -1, new_index),
- updated_tree,
- updated_map,
- )
-
- def modify_doc(new_document, updated_tree, updated_map):
- """
- Applies a document modification to the document tree and the
- document map.
- Returns the DocumentChange event for successful modifications.
- """
- name = new_document.reference._document_path
- assert name in updated_map, "Document to modify does not exist"
- old_document = updated_map.get(name)
- if old_document.update_time != new_document.update_time:
- remove_change, updated_tree, updated_map = delete_doc(
- name, updated_tree, updated_map
- )
- add_change, updated_tree, updated_map = add_doc(
- new_document, updated_tree, updated_map
- )
- return (
- DocumentChange(
- ChangeType.MODIFIED,
- new_document,
- remove_change.old_index,
- add_change.new_index,
- ),
- updated_tree,
- updated_map,
- )
-
- return None, updated_tree, updated_map
-
- # Process the sorted changes in the order that is expected by our
- # clients (removals, additions, and then modifications). We also need
- # to sort the individual changes to assure that old_index/new_index
- # keep incrementing.
- appliedChanges = []
-
- key = functools.cmp_to_key(self._comparator)
-
- # Deletes are sorted based on the order of the existing document.
- delete_changes = sorted(delete_changes, key=key)
- for name in delete_changes:
- change, updated_tree, updated_map = delete_doc(
- name, updated_tree, updated_map
- )
- appliedChanges.append(change)
-
- add_changes = sorted(add_changes, key=key)
- _LOGGER.debug("walk over add_changes")
- for snapshot in add_changes:
- _LOGGER.debug("in add_changes")
- change, updated_tree, updated_map = add_doc(
- snapshot, updated_tree, updated_map
- )
- appliedChanges.append(change)
-
- update_changes = sorted(update_changes, key=key)
- for snapshot in update_changes:
- change, updated_tree, updated_map = modify_doc(
- snapshot, updated_tree, updated_map
- )
- if change is not None:
- appliedChanges.append(change)
-
- assert len(updated_tree) == len(updated_map), (
- "The update document "
- + "tree and document map should have the same number of entries."
- )
- return (updated_tree, updated_map, appliedChanges)
-
- def _affects_target(self, target_ids, current_id):
- if target_ids is None:
- return True
-
- return current_id in target_ids
-
- def _current_size(self):
- """
- Returns the current count of all documents, including the changes from
- the current changeMap.
- """
- deletes, adds, _ = Watch._extract_changes(self.doc_map, self.change_map, None)
- return len(self.doc_map) + len(adds) - len(deletes)
-
- def _reset_docs(self):
- """
- Helper to clear the docs on RESET or filter mismatch.
- """
- _LOGGER.debug("resetting documents")
- self.change_map.clear()
- self.resume_token = None
-
- # Mark each document as deleted. If documents are not deleted
- # they will be sent again by the server.
- for snapshot in self.doc_tree.keys():
- name = snapshot.reference._document_path
- self.change_map[name] = ChangeType.REMOVED
-
- self.current = False
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 0000000000..4505b48543
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.6
+namespace_packages = True
diff --git a/noxfile.py b/noxfile.py
index facb0bb995..0f79223646 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -23,14 +23,15 @@
import nox
-BLACK_VERSION = "black==19.3b0"
+PYTYPE_VERSION = "pytype==2020.7.24"
+BLACK_VERSION = "black==19.10b0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
-if os.path.exists("samples"):
- BLACK_PATHS.append("samples")
+DEFAULT_PYTHON_VERSION = "3.9"
+SYSTEM_TEST_PYTHON_VERSIONS = ["3.9"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
-
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
"""Run linters.
@@ -38,7 +39,9 @@ def lint(session):
serious code quality issues.
"""
session.install("flake8", BLACK_VERSION)
- session.run("black", "--check", *BLACK_PATHS)
+ session.run(
+ "black", "--check", *BLACK_PATHS,
+ )
session.run("flake8", "google", "tests")
@@ -53,10 +56,20 @@ def blacken(session):
check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
"""
session.install(BLACK_VERSION)
- session.run("black", *BLACK_PATHS)
+ session.run(
+ "black", *BLACK_PATHS,
+ )
@nox.session(python="3.7")
+def pytype(session):
+ """Run pytype
+ """
+ session.install(PYTYPE_VERSION)
+ session.run("pytype",)
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.install("docutils", "pygments")
@@ -65,6 +78,8 @@ def lint_setup_py(session):
def default(session):
# Install all test dependencies, then install this package in-place.
+ session.install("pytest-asyncio", "aiounittest")
+
session.install("mock", "pytest", "pytest-cov")
session.install("-e", ".")
@@ -83,18 +98,21 @@ def default(session):
*session.posargs,
)
-
-@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"])
+@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit(session):
"""Run the unit test suite."""
default(session)
-@nox.session(python=["2.7", "3.7"])
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
def system(session):
"""Run the system test suite."""
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
+
+ # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+ session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("FIRESTORE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
@@ -110,7 +128,9 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install("mock", "pytest", "google-cloud-testutils")
+ session.install(
+ "mock", "pytest", "pytest-asyncio", "google-cloud-testutils",
+ )
session.install("-e", ".")
# Run py.test against the system tests.
@@ -120,7 +140,7 @@ def system(session):
session.run("py.test", "--verbose", system_test_folder_path, *session.posargs)
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def cover(session):
"""Run the final coverage report.
@@ -128,17 +148,17 @@ def cover(session):
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
- session.run("coverage", "report", "--show-missing", "--fail-under=99")
+ session.run("coverage", "report", "--show-missing", "--fail-under=97")
session.run("coverage", "erase")
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def docs(session):
"""Build the docs for this library."""
session.install("-e", ".")
- session.install("sphinx<3.0.0", "alabaster", "recommonmark")
+ session.install("sphinx", "alabaster", "recommonmark")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
@@ -153,3 +173,38 @@ def docs(session):
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
)
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def docfx(session):
+ """Build the docfx yaml files for this library."""
+
+ session.install("-e", ".")
+ # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+ session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-D",
+ (
+ "extensions=sphinx.ext.autodoc,"
+ "sphinx.ext.autosummary,"
+ "docfx_yaml.extension,"
+ "sphinx.ext.intersphinx,"
+ "sphinx.ext.coverage,"
+ "sphinx.ext.napoleon,"
+ "sphinx.ext.todo,"
+ "sphinx.ext.viewcode,"
+ "recommonmark"
+ ),
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md
new file mode 100644
index 0000000000..55c97b32f4
--- /dev/null
+++ b/samples/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md
new file mode 100644
index 0000000000..34c882b6f1
--- /dev/null
+++ b/samples/CONTRIBUTING.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md
\ No newline at end of file
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
new file mode 100755
index 0000000000..21f6d2a26d
--- /dev/null
+++ b/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
+ > testing/test-env.sh
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
+ > testing/service-account.json
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-client-secrets" \
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
diff --git a/scripts/fixup_firestore_admin_v1_keywords.py b/scripts/fixup_firestore_admin_v1_keywords.py
new file mode 100644
index 0000000000..e9341f0473
--- /dev/null
+++ b/scripts/fixup_firestore_admin_v1_keywords.py
@@ -0,0 +1,192 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import os
+
+try:
+ import libcst as cst
+except ImportError:
+ raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.')
+
+
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+ predicate: Callable[[Any], bool],
+ iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+ """A stable, out-of-place partition."""
+ results = ([], [])
+
+ for i in iterator:
+ results[int(predicate(i))].append(i)
+
+ # Returns trueList, falseList
+ return results[1], results[0]
+
+
+class firestore_adminCallTransformer(cst.CSTTransformer):
+ CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
+ METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+ 'create_index': ('parent', 'index', ),
+ 'delete_index': ('name', ),
+ 'export_documents': ('name', 'collection_ids', 'output_uri_prefix', ),
+ 'get_field': ('name', ),
+ 'get_index': ('name', ),
+ 'import_documents': ('name', 'collection_ids', 'input_uri_prefix', ),
+ 'list_fields': ('parent', 'filter', 'page_size', 'page_token', ),
+ 'list_indexes': ('parent', 'filter', 'page_size', 'page_token', ),
+ 'update_field': ('field', 'update_mask', ),
+
+ }
+
+ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+ try:
+ key = original.func.attr.value
+ kword_params = self.METHOD_TO_PARAMS[key]
+ except (AttributeError, KeyError):
+ # Either not a method from the API or too convoluted to be sure.
+ return updated
+
+ # If the existing code is valid, keyword args come after positional args.
+ # Therefore, all positional args must map to the first parameters.
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+ if any(k.keyword.value == "request" for k in kwargs):
+ # We've already fixed this file, don't fix it again.
+ return updated
+
+ kwargs, ctrl_kwargs = partition(
+ lambda a: not a.keyword.value in self.CTRL_PARAMS,
+ kwargs
+ )
+
+ args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+ ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+ for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+ request_arg = cst.Arg(
+ value=cst.Dict([
+ cst.DictElement(
+ cst.SimpleString("'{}'".format(name)),
+ cst.Element(value=arg.value)
+ )
+ # Note: the args + kwargs looks silly, but keep in mind that
+ # the control parameters had to be stripped out, and that
+ # those could have been passed positionally or by keyword.
+ for name, arg in zip(kword_params, args + kwargs)]),
+ keyword=cst.Name("request")
+ )
+
+ return updated.with_changes(
+ args=[request_arg] + ctrl_kwargs
+ )
+
+
+def fix_files(
+ in_dir: pathlib.Path,
+ out_dir: pathlib.Path,
+ *,
+ transformer=firestore_adminCallTransformer(),
+):
+ """Duplicate the input dir to the output dir, fixing file method calls.
+
+ Preconditions:
+ * in_dir is a real directory
+ * out_dir is a real, empty directory
+ """
+ pyfile_gen = (
+ pathlib.Path(os.path.join(root, f))
+ for root, _, files in os.walk(in_dir)
+ for f in files if os.path.splitext(f)[1] == ".py"
+ )
+
+ for fpath in pyfile_gen:
+ with open(fpath, 'r') as f:
+ src = f.read()
+
+ # Parse the code and insert method call fixes.
+ tree = cst.parse_module(src)
+ updated = tree.visit(transformer)
+
+ # Create the path and directory structure for the new file.
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+ updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+ # Generate the updated source file at the corresponding path.
+ with open(updated_path, 'w') as f:
+ f.write(updated.code)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description="""Fix up source that uses the firestore_admin client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+ parameters in client method calls to keyword based parameters.
+ Cases where it WILL FAIL include
+ A) * or ** expansion in a method call.
+ B) Calls via function or method alias (includes free function calls)
+ C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+ These all constitute false negatives. The tool will also detect false
+ positives when an API method shares a name with another method.
+""")
+ parser.add_argument(
+ '-d',
+ '--input-directory',
+ required=True,
+ dest='input_dir',
+ help='the input directory to walk for python files to fix up',
+ )
+ parser.add_argument(
+ '-o',
+ '--output-directory',
+ required=True,
+ dest='output_dir',
+ help='the directory to output files fixed via un-flattening',
+ )
+ args = parser.parse_args()
+ input_dir = pathlib.Path(args.input_dir)
+ output_dir = pathlib.Path(args.output_dir)
+ if not input_dir.is_dir():
+ print(
+ f"input directory '{input_dir}' does not exist or is not a directory",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ if not output_dir.is_dir():
+ print(
+ f"output directory '{output_dir}' does not exist or is not a directory",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ if os.listdir(output_dir):
+ print(
+ f"output directory '{output_dir}' is not empty",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ fix_files(input_dir, output_dir)
diff --git a/scripts/fixup_firestore_v1_keywords.py b/scripts/fixup_firestore_v1_keywords.py
new file mode 100644
index 0000000000..374b941620
--- /dev/null
+++ b/scripts/fixup_firestore_v1_keywords.py
@@ -0,0 +1,198 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import os
+
+try:
+ import libcst as cst
+except ImportError:
+ raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.')
+
+
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+ predicate: Callable[[Any], bool],
+ iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+ """A stable, out-of-place partition."""
+ results = ([], [])
+
+ for i in iterator:
+ results[int(predicate(i))].append(i)
+
+ # Returns trueList, falseList
+ return results[1], results[0]
+
+
+class firestoreCallTransformer(cst.CSTTransformer):
+ CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
+ METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+ 'batch_get_documents': ('database', 'documents', 'mask', 'transaction', 'new_transaction', 'read_time', ),
+ 'batch_write': ('database', 'writes', 'labels', ),
+ 'begin_transaction': ('database', 'options', ),
+ 'commit': ('database', 'writes', 'transaction', ),
+ 'create_document': ('parent', 'collection_id', 'document', 'document_id', 'mask', ),
+ 'delete_document': ('name', 'current_document', ),
+ 'get_document': ('name', 'mask', 'transaction', 'read_time', ),
+ 'list_collection_ids': ('parent', 'page_size', 'page_token', ),
+ 'list_documents': ('parent', 'collection_id', 'page_size', 'page_token', 'order_by', 'mask', 'transaction', 'read_time', 'show_missing', ),
+ 'listen': ('database', 'add_target', 'remove_target', 'labels', ),
+ 'partition_query': ('parent', 'structured_query', 'partition_count', 'page_token', 'page_size', ),
+ 'rollback': ('database', 'transaction', ),
+ 'run_query': ('parent', 'structured_query', 'transaction', 'new_transaction', 'read_time', ),
+ 'update_document': ('document', 'update_mask', 'mask', 'current_document', ),
+ 'write': ('database', 'stream_id', 'writes', 'stream_token', 'labels', ),
+
+ }
+
+ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+ try:
+ key = original.func.attr.value
+ kword_params = self.METHOD_TO_PARAMS[key]
+ except (AttributeError, KeyError):
+ # Either not a method from the API or too convoluted to be sure.
+ return updated
+
+ # If the existing code is valid, keyword args come after positional args.
+ # Therefore, all positional args must map to the first parameters.
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+ if any(k.keyword.value == "request" for k in kwargs):
+ # We've already fixed this file, don't fix it again.
+ return updated
+
+ kwargs, ctrl_kwargs = partition(
+ lambda a: not a.keyword.value in self.CTRL_PARAMS,
+ kwargs
+ )
+
+ args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+ ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+ for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+ request_arg = cst.Arg(
+ value=cst.Dict([
+ cst.DictElement(
+ cst.SimpleString("'{}'".format(name)),
+ cst.Element(value=arg.value)
+ )
+ # Note: the args + kwargs looks silly, but keep in mind that
+ # the control parameters had to be stripped out, and that
+ # those could have been passed positionally or by keyword.
+ for name, arg in zip(kword_params, args + kwargs)]),
+ keyword=cst.Name("request")
+ )
+
+ return updated.with_changes(
+ args=[request_arg] + ctrl_kwargs
+ )
+
+
+def fix_files(
+ in_dir: pathlib.Path,
+ out_dir: pathlib.Path,
+ *,
+ transformer=firestoreCallTransformer(),
+):
+ """Duplicate the input dir to the output dir, fixing file method calls.
+
+ Preconditions:
+ * in_dir is a real directory
+ * out_dir is a real, empty directory
+ """
+ pyfile_gen = (
+ pathlib.Path(os.path.join(root, f))
+ for root, _, files in os.walk(in_dir)
+ for f in files if os.path.splitext(f)[1] == ".py"
+ )
+
+ for fpath in pyfile_gen:
+ with open(fpath, 'r') as f:
+ src = f.read()
+
+ # Parse the code and insert method call fixes.
+ tree = cst.parse_module(src)
+ updated = tree.visit(transformer)
+
+ # Create the path and directory structure for the new file.
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+ updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+ # Generate the updated source file at the corresponding path.
+ with open(updated_path, 'w') as f:
+ f.write(updated.code)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description="""Fix up source that uses the firestore client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+ parameters in client method calls to keyword based parameters.
+ Cases where it WILL FAIL include
+ A) * or ** expansion in a method call.
+ B) Calls via function or method alias (includes free function calls)
+ C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+ These all constitute false negatives. The tool will also detect false
+ positives when an API method shares a name with another method.
+""")
+ parser.add_argument(
+ '-d',
+ '--input-directory',
+ required=True,
+ dest='input_dir',
+ help='the input directory to walk for python files to fix up',
+ )
+ parser.add_argument(
+ '-o',
+ '--output-directory',
+ required=True,
+ dest='output_dir',
+ help='the directory to output files fixed via un-flattening',
+ )
+ args = parser.parse_args()
+ input_dir = pathlib.Path(args.input_dir)
+ output_dir = pathlib.Path(args.output_dir)
+ if not input_dir.is_dir():
+ print(
+ f"input directory '{input_dir}' does not exist or is not a directory",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ if not output_dir.is_dir():
+ print(
+ f"output directory '{output_dir}' does not exist or is not a directory",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ if os.listdir(output_dir):
+ print(
+ f"output directory '{output_dir}' is not empty",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ fix_files(input_dir, output_dir)
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
new file mode 100644
index 0000000000..d309d6e975
--- /dev/null
+++ b/scripts/readme-gen/readme_gen.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+ trim_blocks=True,
+ loader=jinja2.FileSystemLoader(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))
+
+README_TMPL = jinja_env.get_template('README.tmpl.rst')
+
+
+def get_help(file):
+ return subprocess.check_output(['python', file, '--help']).decode()
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('source')
+ parser.add_argument('--destination', default='README.rst')
+
+ args = parser.parse_args()
+
+ source = os.path.abspath(args.source)
+ root = os.path.dirname(source)
+ destination = os.path.join(root, args.destination)
+
+ jinja_env.globals['get_help'] = get_help
+
+ with io.open(source, 'r') as f:
+ config = yaml.load(f)
+
+ # This allows get_help to execute in the right directory.
+ os.chdir(root)
+
+ output = README_TMPL.render(config)
+
+ with io.open(destination, 'w') as f:
+ f.write(output)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 0000000000..4fd239765b
--- /dev/null
+++ b/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+ become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python {{sample.file}}
+{% if sample.show_help %}
+
+ {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 0000000000..1446b94a5e
--- /dev/null
+++ b/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 0000000000..11957ce271
--- /dev/null
+++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+ one for your project.
+
+.. _API Key:
+ https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 0000000000..a0406dba8c
--- /dev/null
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 0000000000..5ea33d18c0
--- /dev/null
+++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+ brew install portaudio
+
+ **Note**: if you encounter an error when running `pip install` that indicates
+ it can't find `portaudio.h`, try running `pip install` with the following
+ flags::
+
+ pip install --global-option='build_ext' \
+ --global-option='-I/usr/local/include' \
+ --global-option='-L/usr/local/lib' \
+ pyaudio
+
+* For Debian / Ubuntu Linux::
+
+ apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+ installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+ https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/setup.cfg b/setup.cfg
index c3a2b39f65..093711f703 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -17,3 +17,12 @@
# Generated by synthtool. DO NOT EDIT!
[bdist_wheel]
universal = 1
+[pytype]
+python_version = 3.8
+inputs =
+ google/cloud/
+exclude =
+ tests/
+output = .pytype/
+# Workaround for https://github.com/google/pytype/issues/150
+disable = pyi-error
diff --git a/setup.py b/setup.py
index 7934d606ed..6552038980 100644
--- a/setup.py
+++ b/setup.py
@@ -22,12 +22,13 @@
name = "google-cloud-firestore"
description = "Google Cloud Firestore API client library"
-version = "1.7.0"
+version = "2.0.0"
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "google-api-core[grpc] >= 1.14.0, < 2.0.0dev",
- "google-cloud-core >= 1.0.3, < 2.0dev",
+ "google-api-core[grpc] >= 1.22.1, < 2.0.0dev",
+ "google-cloud-core >= 1.4.1, < 2.0dev",
"pytz",
+ "proto-plus >= 1.3.0",
]
extras = {}
@@ -65,21 +66,25 @@
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
- "Programming Language :: Python :: 2",
- "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
"Operating System :: OS Independent",
"Topic :: Internet",
+ "Topic :: Software Development :: Libraries :: Python Modules",
],
platforms="Posix; MacOS X; Windows",
packages=packages,
namespace_packages=namespaces,
install_requires=dependencies,
extras_require=extras,
- python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
+ python_requires=">=3.6",
+ scripts=[
+ "scripts/fixup_firestore_v1_keywords.py",
+ "scripts/fixup_firestore_admin_v1_keywords.py",
+ ],
include_package_data=True,
zip_safe=False,
)
diff --git a/synth.metadata b/synth.metadata
index 3740fc0032..3069caf916 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -1,54 +1,36 @@
{
"sources": [
- {
- "generator": {
- "name": "artman",
- "version": "2.0.0",
- "dockerImage": "googleapis/artman@sha256:b3b47805231a305d0f40c4bf069df20f6a2635574e6d4259fac651d3f9f6e098"
- }
- },
{
"git": {
"name": ".",
"remote": "git@github.com:googleapis/python-firestore",
- "sha": "30ca7962134dd534bbc2a00e40de7e0b35401464"
+ "sha": "db5f286772592460b2bf02df25a121994889585d"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "756b174de4a122461993c1c583345533d819936d",
- "internalRef": "308824110"
+ "sha": "2131e2f755b3c2604e2d08de81a299fd7e377dcd",
+ "internalRef": "338527875"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "01b6f23d24b27878b48667ce597876d66b59780e"
+ "sha": "901ddd44e9ef7887ee681b9183bbdea99437fdcc"
}
}
],
"destinations": [
- {
- "client": {
- "source": "googleapis",
- "apiName": "firestore",
- "apiVersion": "v1beta1",
- "language": "python",
- "generator": "gapic",
- "config": "google/firestore/artman_firestore.yaml"
- }
- },
{
"client": {
"source": "googleapis",
"apiName": "firestore",
"apiVersion": "v1",
"language": "python",
- "generator": "gapic",
- "config": "google/firestore/artman_firestore_v1.yaml"
+ "generator": "bazel"
}
},
{
@@ -57,8 +39,7 @@
"apiName": "firestore_admin",
"apiVersion": "v1",
"language": "python",
- "generator": "gapic",
- "config": "google/firestore/admin/artman_firestore_v1.yaml"
+ "generator": "bazel"
}
}
]
diff --git a/synth.py b/synth.py
index d6302dd894..8a363c5922 100644
--- a/synth.py
+++ b/synth.py
@@ -21,7 +21,7 @@
gapic = gcp.GAPICBazel()
common = gcp.CommonTemplates()
-versions = ["v1beta1", "v1"]
+versions = ["v1"]
admin_versions = ["v1"]
@@ -33,24 +33,19 @@
service="firestore",
version=version,
bazel_target=f"//google/firestore/{version}:firestore-{version}-py",
- include_protos=True,
)
- s.move(library / f"google/cloud/firestore_{version}/proto")
- s.move(library / f"google/cloud/firestore_{version}/gapic")
- s.move(library / f"tests/unit/gapic/{version}")
-
- s.replace(
- f"tests/unit/gapic/{version}/test_firestore_client_{version}.py",
- f"from google.cloud import firestore_{version}",
- f"from google.cloud.firestore_{version}.gapic import firestore_client",
+ s.move(
+ library / f"google/cloud/firestore_{version}",
+ f"google/cloud/firestore_{version}",
+ excludes=[library / f"google/cloud/firestore_{version}/__init__.py"],
)
- s.replace(
- f"tests/unit/gapic/{version}/test_firestore_client_{version}.py",
- f"client = firestore_{version}.FirestoreClient",
- "client = firestore_client.FirestoreClient",
+ s.move(
+ library / f"tests/",
+ f"tests",
)
+ s.move(library / "scripts")
# ----------------------------------------------------------------------------
@@ -61,22 +56,30 @@
service="firestore_admin",
version=version,
bazel_target=f"//google/firestore/admin/{version}:firestore-admin-{version}-py",
- include_protos=True,
)
- s.move(library / f"google/cloud/firestore_admin_{version}")
- s.move(library / "tests")
-
- s.replace(
- f"google/cloud/firestore_admin_{version}/gapic/firestore_admin_client.py",
- "'google-cloud-firestore-admin'",
- "'google-cloud-firestore'",
+ s.move(
+ library / f"google/cloud/firestore_admin_{version}",
+ f"google/cloud/firestore_admin_{version}",
+ excludes=[library / f"google/cloud/admin_{version}/__init__.py"],
)
+ s.move(library / f"tests", f"tests")
+ s.move(library / "scripts")
+
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(unit_cov_level=97, cov_level=99)
-s.move(templated_files)
+templated_files = common.py_library(
+ samples=False, # set to True only if there are samples
+ unit_test_python_versions=["3.6", "3.7", "3.8"],
+ system_test_python_versions=["3.7"],
+ microgenerator=True,
+ cov_level=97, # https://github.com/googleapis/python-firestore/issues/190
+)
+
+s.move(
+ templated_files,
+)
s.replace(
"noxfile.py",
@@ -90,5 +93,126 @@
'"--verbose", system_test',
)
+# Add pytype support
+s.replace(
+ ".gitignore",
+ """\
+.pytest_cache
+""",
+ """\
+.pytest_cache
+.pytype
+""",
+)
+
+s.replace(
+ "setup.cfg",
+ """\
+universal = 1
+""",
+ """\
+universal = 1
+[pytype]
+python_version = 3.8
+inputs =
+ google/cloud/
+exclude =
+ tests/
+output = .pytype/
+# Workaround for https://github.com/google/pytype/issues/150
+disable = pyi-error
+""",
+)
+
+s.replace(
+ "noxfile.py",
+ """\
+BLACK_VERSION = "black==19.10b0"
+""",
+ """\
+PYTYPE_VERSION = "pytype==2020.7.24"
+BLACK_VERSION = "black==19.10b0"
+""",
+)
+
+s.replace(
+ "noxfile.py",
+ """\
+@nox.session\(python=DEFAULT_PYTHON_VERSION\)
+def lint_setup_py\(session\):
+""",
+ '''\
+@nox.session(python="3.7")
+def pytype(session):
+ """Run pytype
+ """
+ session.install(PYTYPE_VERSION)
+ session.run("pytype",)
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def lint_setup_py(session):
+''',
+)
+
+# Fix up unit test dependencies
+
+s.replace(
+ "noxfile.py",
+ """\
+ session.install\("asyncmock", "pytest-asyncio"\)
+""",
+ """\
+ session.install("pytest-asyncio", "aiounittest")
+""",
+)
+
+# Fix up system test dependencies
+
+s.replace(
+ "noxfile.py",
+ """"mock", "pytest", "google-cloud-testutils",""",
+ """"mock", "pytest", "pytest-asyncio", "google-cloud-testutils",""",
+)
+
+
+# Add message for missing 'libcst' dependency
+s.replace(
+ "scripts/fixup*.py",
+ """\
+import libcst as cst
+""",
+ """\
+
+try:
+ import libcst as cst
+except ImportError:
+ raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.')
+
+
+""",
+)
+
+s.replace(
+ ".coveragerc",
+ """\
+ raise NotImplementedError
+omit =
+""",
+ """\
+ raise NotImplementedError
+ # Ignore setuptools-less fallback
+ except pkg_resources.DistributionNotFound:
+omit =
+""",
+)
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
+
+s.replace(
+ ".kokoro/build.sh",
+ "# Setup service account credentials.",
+ """\
+# Setup firestore account credentials
+export FIRESTORE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/firebase-credentials.json
+
+# Setup service account credentials.""",
+)
diff --git a/testing/.gitignore b/testing/.gitignore
new file mode 100644
index 0000000000..b05fbd6308
--- /dev/null
+++ b/testing/.gitignore
@@ -0,0 +1,3 @@
+test-env.sh
+service-account.json
+client-secrets.json
\ No newline at end of file
diff --git a/tests/system/test__helpers.py b/tests/system/test__helpers.py
new file mode 100644
index 0000000000..f5541fd8a2
--- /dev/null
+++ b/tests/system/test__helpers.py
@@ -0,0 +1,14 @@
+import os
+import re
+from google.cloud.firestore_v1.base_client import _FIRESTORE_EMULATOR_HOST
+from test_utils.system import unique_resource_id
+from test_utils.system import EmulatorCreds
+
+FIRESTORE_CREDS = os.environ.get("FIRESTORE_APPLICATION_CREDENTIALS")
+FIRESTORE_PROJECT = os.environ.get("GCLOUD_PROJECT")
+RANDOM_ID_REGEX = re.compile("^[a-zA-Z0-9]{20}$")
+MISSING_DOCUMENT = "No document to update: "
+DOCUMENT_EXISTS = "Document already exists: "
+UNIQUE_RESOURCE_ID = unique_resource_id("-")
+EMULATOR_CREDS = EmulatorCreds()
+FIRESTORE_EMULATOR = os.environ.get(_FIRESTORE_EMULATOR_HOST) is not None
diff --git a/tests/system/test_system.py b/tests/system/test_system.py
index 71ac07fcee..355c5aebb8 100644
--- a/tests/system/test_system.py
+++ b/tests/system/test_system.py
@@ -13,39 +13,49 @@
# limitations under the License.
import datetime
+import itertools
import math
import operator
-import os
-import re
from google.oauth2 import service_account
-from google.protobuf import timestamp_pb2
import pytest
-import six
from google.api_core.exceptions import AlreadyExists
from google.api_core.exceptions import FailedPrecondition
from google.api_core.exceptions import InvalidArgument
from google.api_core.exceptions import NotFound
-from google.cloud._helpers import _pb_timestamp_to_datetime
+from google.cloud._helpers import _datetime_to_pb_timestamp
from google.cloud._helpers import UTC
from google.cloud import firestore_v1 as firestore
-from test_utils.system import unique_resource_id
from time import sleep
-FIRESTORE_CREDS = os.environ.get("FIRESTORE_APPLICATION_CREDENTIALS")
-FIRESTORE_PROJECT = os.environ.get("GCLOUD_PROJECT")
-RANDOM_ID_REGEX = re.compile("^[a-zA-Z0-9]{20}$")
-MISSING_DOCUMENT = "No document to update: "
-DOCUMENT_EXISTS = "Document already exists: "
-UNIQUE_RESOURCE_ID = unique_resource_id("-")
+from tests.system.test__helpers import (
+ FIRESTORE_CREDS,
+ FIRESTORE_PROJECT,
+ RANDOM_ID_REGEX,
+ MISSING_DOCUMENT,
+ UNIQUE_RESOURCE_ID,
+ EMULATOR_CREDS,
+ FIRESTORE_EMULATOR,
+)
+
+
+def _get_credentials_and_project():
+ if FIRESTORE_EMULATOR:
+ credentials = EMULATOR_CREDS
+ project = FIRESTORE_PROJECT
+ else:
+ credentials = service_account.Credentials.from_service_account_file(
+ FIRESTORE_CREDS
+ )
+ project = FIRESTORE_PROJECT or credentials.project_id
+ return credentials, project
-@pytest.fixture(scope=u"module")
+@pytest.fixture(scope="module")
def client():
- credentials = service_account.Credentials.from_service_account_file(FIRESTORE_CREDS)
- project = FIRESTORE_PROJECT or credentials.project_id
+ credentials, project = _get_credentials_and_project()
yield firestore.Client(project=project, credentials=credentials)
@@ -63,6 +73,16 @@ def test_collections(client):
assert isinstance(collections, list)
+def test_collections_w_import():
+ from google.cloud import firestore
+
+ credentials, project = _get_credentials_and_project()
+ client = firestore.Client(project=project, credentials=credentials)
+ collections = list(client.collections())
+
+ assert isinstance(collections, list)
+
+
def test_create_document(client, cleanup):
now = datetime.datetime.utcnow().replace(tzinfo=UTC)
collection_id = "doc-create" + UNIQUE_RESOURCE_ID
@@ -78,7 +98,7 @@ def test_create_document(client, cleanup):
"also": {"nestednow": firestore.SERVER_TIMESTAMP, "quarter": 0.25},
}
write_result = document.create(data)
- updated = _pb_timestamp_to_datetime(write_result.update_time)
+ updated = write_result.update_time
delta = updated - now
# Allow a bit of clock skew, but make sure timestamps are close.
assert -300.0 < delta.total_seconds() < 300.0
@@ -95,7 +115,9 @@ def test_create_document(client, cleanup):
# NOTE: We could check the ``transform_results`` from the write result
# for the document transform, but this value gets dropped. Instead
# we make sure the timestamps are close.
- assert 0.0 <= delta.total_seconds() < 5.0
+ # TODO(microgen): this was 0.0 - 5.0 before. After microgen, This started
+ # getting very small negative times.
+ assert -0.2 <= delta.total_seconds() < 5.0
expected_data = {
"now": server_now,
"eenta-ger": data["eenta-ger"],
@@ -126,6 +148,7 @@ def test_create_document_w_subcollection(client, cleanup):
assert sorted(child.id for child in children) == sorted(child_ids)
+@pytest.mark.skipif(FIRESTORE_EMULATOR, reason="Internal Issue b/137866686")
def test_cannot_use_foreign_key(client, cleanup):
document_id = "cannot" + UNIQUE_RESOURCE_ID
document = client.document("foreign-key", document_id)
@@ -142,9 +165,7 @@ def test_cannot_use_foreign_key(client, cleanup):
def assert_timestamp_less(timestamp_pb1, timestamp_pb2):
- dt_val1 = _pb_timestamp_to_datetime(timestamp_pb1)
- dt_val2 = _pb_timestamp_to_datetime(timestamp_pb2)
- assert dt_val1 < dt_val2
+ assert timestamp_pb1 < timestamp_pb2
def test_no_document(client):
@@ -280,6 +301,7 @@ def test_document_update_w_int_field(client, cleanup):
assert snapshot1.to_dict() == expected
+@pytest.mark.skipif(FIRESTORE_EMULATOR, reason="Internal Issue b/137867104")
def test_update_document(client, cleanup):
document_id = "for-update" + UNIQUE_RESOURCE_ID
document = client.document("made", document_id)
@@ -333,11 +355,13 @@ def test_update_document(client, cleanup):
document.update({"bad": "time-past"}, option=option4)
# 6. Call ``update()`` with invalid (in future) "last timestamp" option.
- timestamp_pb = timestamp_pb2.Timestamp(
- seconds=snapshot4.update_time.nanos + 3600, nanos=snapshot4.update_time.nanos
- )
+ timestamp_pb = _datetime_to_pb_timestamp(snapshot4.update_time)
+ timestamp_pb.seconds += 3600
+
option6 = client.write_option(last_update_time=timestamp_pb)
- with pytest.raises(FailedPrecondition) as exc_info:
+ # TODO(microgen):invalid argument thrown after microgen.
+ # with pytest.raises(FailedPrecondition) as exc_info:
+ with pytest.raises(InvalidArgument) as exc_info:
document.update({"bad": "time-future"}, option=option6)
@@ -366,7 +390,7 @@ def test_document_get(client, cleanup):
"fire": 199099299,
"referee": ref_doc,
"gio": firestore.GeoPoint(45.5, 90.0),
- "deep": [u"some", b"\xde\xad\xbe\xef"],
+ "deep": ["some", b"\xde\xad\xbe\xef"],
"map": {"ice": True, "water": None, "vapor": {"deeper": now}},
}
write_result = document.create(data)
@@ -383,19 +407,23 @@ def test_document_delete(client, cleanup):
# 1. Call ``delete()`` with invalid (in the past) "last timestamp" option.
snapshot1 = document.get()
- timestamp_pb = timestamp_pb2.Timestamp(
- seconds=snapshot1.update_time.nanos - 3600, nanos=snapshot1.update_time.nanos
- )
+ timestamp_pb = _datetime_to_pb_timestamp(snapshot1.update_time)
+ timestamp_pb.seconds += 3600
+
option1 = client.write_option(last_update_time=timestamp_pb)
- with pytest.raises(FailedPrecondition):
+ # TODO(microgen):invalid argument thrown after microgen.
+ # with pytest.raises(FailedPrecondition):
+ with pytest.raises(InvalidArgument):
document.delete(option=option1)
# 2. Call ``delete()`` with invalid (in future) "last timestamp" option.
- timestamp_pb = timestamp_pb2.Timestamp(
- seconds=snapshot1.update_time.nanos + 3600, nanos=snapshot1.update_time.nanos
- )
+ timestamp_pb = _datetime_to_pb_timestamp(snapshot1.update_time)
+ timestamp_pb.seconds += 3600
+
option2 = client.write_option(last_update_time=timestamp_pb)
- with pytest.raises(FailedPrecondition):
+ # TODO(microgen):invalid argument thrown after microgen.
+ # with pytest.raises(FailedPrecondition):
+ with pytest.raises(InvalidArgument):
document.delete(option=option2)
# 3. Actually ``delete()`` the document.
@@ -407,6 +435,8 @@ def test_document_delete(client, cleanup):
def test_collection_add(client, cleanup):
+ # TODO(microgen): list_documents is returning a generator, not a list.
+ # Consider if this is desired. Also, Document isn't hashable.
collection_id = "coll-add" + UNIQUE_RESOURCE_ID
collection1 = client.collection(collection_id)
collection2 = client.collection(collection_id, "doc", "child")
@@ -501,7 +531,7 @@ def query_docs(client):
cleanup = []
stored = {}
num_vals = 5
- allowed_vals = six.moves.xrange(num_vals)
+ allowed_vals = range(num_vals)
for a_val in allowed_vals:
for b_val in allowed_vals:
document_data = {
@@ -526,7 +556,7 @@ def test_query_stream_w_simple_field_eq_op(query_docs):
query = collection.where("a", "==", 1)
values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
assert len(values) == len(allowed_vals)
- for key, value in six.iteritems(values):
+ for key, value in values.items():
assert stored[key] == value
assert value["a"] == 1
@@ -536,7 +566,7 @@ def test_query_stream_w_simple_field_array_contains_op(query_docs):
query = collection.where("c", "array_contains", 1)
values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
assert len(values) == len(allowed_vals)
- for key, value in six.iteritems(values):
+ for key, value in values.items():
assert stored[key] == value
assert value["a"] == 1
@@ -547,18 +577,48 @@ def test_query_stream_w_simple_field_in_op(query_docs):
query = collection.where("a", "in", [1, num_vals + 100])
values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
assert len(values) == len(allowed_vals)
- for key, value in six.iteritems(values):
+ for key, value in values.items():
assert stored[key] == value
assert value["a"] == 1
+def test_query_stream_w_not_eq_op(query_docs):
+ collection, stored, allowed_vals = query_docs
+ query = collection.where("stats.sum", "!=", 4)
+ values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
+ assert len(values) == 20
+ ab_pairs2 = set()
+ for key, value in values.items():
+ assert stored[key] == value
+ ab_pairs2.add((value["a"], value["b"]))
+
+ expected_ab_pairs = set(
+ [
+ (a_val, b_val)
+ for a_val in allowed_vals
+ for b_val in allowed_vals
+ if a_val + b_val != 4
+ ]
+ )
+ assert expected_ab_pairs == ab_pairs2
+
+
+def test_query_stream_w_simple_not_in_op(query_docs):
+ collection, stored, allowed_vals = query_docs
+ num_vals = len(allowed_vals)
+ query = collection.where("stats.sum", "not-in", [2, num_vals + 100])
+ values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
+
+ assert len(values) == 22
+
+
def test_query_stream_w_simple_field_array_contains_any_op(query_docs):
collection, stored, allowed_vals = query_docs
num_vals = len(allowed_vals)
query = collection.where("c", "array_contains_any", [1, num_vals * 200])
values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
assert len(values) == len(allowed_vals)
- for key, value in six.iteritems(values):
+ for key, value in values.items():
assert stored[key] == value
assert value["a"] == 1
@@ -582,7 +642,7 @@ def test_query_stream_w_field_path(query_docs):
values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
assert len(values) == 10
ab_pairs2 = set()
- for key, value in six.iteritems(values):
+ for key, value in values.items():
assert stored[key] == value
ab_pairs2.add((value["a"], value["b"]))
@@ -626,7 +686,7 @@ def test_query_stream_w_projection(query_docs):
query = collection.where("b", "<=", 1).select(["a", "stats.product"])
values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
assert len(values) == num_vals * 2 # a ANY, b in (0, 1)
- for key, value in six.iteritems(values):
+ for key, value in values.items():
expected = {
"a": stored[key]["a"],
"stats": {"product": stored[key]["stats"]["product"]},
@@ -645,7 +705,7 @@ def test_query_stream_w_multiple_filters(query_docs):
if 5 < a_val * b_val < 10
]
assert len(values) == len(matching_pairs)
- for key, value in six.iteritems(values):
+ for key, value in values.items():
assert stored[key] == value
pair = (value["a"], value["b"])
assert pair in matching_pairs
@@ -661,14 +721,14 @@ def test_query_stream_w_offset(query_docs):
# an ``order_by('a')``, which combined with the ``b == 2``
# filter would necessitate an index.
assert len(values) == num_vals - offset
- for key, value in six.iteritems(values):
+ for key, value in values.items():
assert stored[key] == value
assert value["b"] == 2
def test_query_with_order_dot_key(client, cleanup):
db = client
- collection_id = "collek" + unique_resource_id("-")
+ collection_id = "collek" + UNIQUE_RESOURCE_ID
collection = db.collection(collection_id)
for index in range(100, -1, -1):
doc = collection.document("test_{:09d}".format(index))
@@ -688,9 +748,9 @@ def test_query_with_order_dot_key(client, cleanup):
.stream()
)
found_data = [
- {u"count": 30, u"wordcount": {u"page1": 130}},
- {u"count": 40, u"wordcount": {u"page1": 140}},
- {u"count": 50, u"wordcount": {u"page1": 150}},
+ {"count": 30, "wordcount": {"page1": 130}},
+ {"count": 40, "wordcount": {"page1": 140}},
+ {"count": 50, "wordcount": {"page1": 150}},
]
assert found_data == [snap.to_dict() for snap in found]
cursor_with_dotted_paths = {"wordcount.page1": last_value}
@@ -861,6 +921,64 @@ def test_collection_group_queries_filters(client, cleanup):
assert found == set(["cg-doc2"])
+def test_partition_query_no_partitions(client, cleanup):
+ collection_group = "b" + UNIQUE_RESOURCE_ID
+
+ # less than minimum partition size
+ doc_paths = [
+ "abc/123/" + collection_group + "/cg-doc1",
+ "abc/123/" + collection_group + "/cg-doc2",
+ collection_group + "/cg-doc3",
+ collection_group + "/cg-doc4",
+ "def/456/" + collection_group + "/cg-doc5",
+ ]
+
+ batch = client.batch()
+ cleanup_batch = client.batch()
+ cleanup(cleanup_batch.commit)
+ for doc_path in doc_paths:
+ doc_ref = client.document(doc_path)
+ batch.set(doc_ref, {"x": 1})
+ cleanup_batch.delete(doc_ref)
+
+ batch.commit()
+
+ query = client.collection_group(collection_group)
+ partitions = list(query.get_partitions(3))
+ streams = [partition.query().stream() for partition in partitions]
+ snapshots = itertools.chain(*streams)
+ found = [snapshot.id for snapshot in snapshots]
+ expected = ["cg-doc1", "cg-doc2", "cg-doc3", "cg-doc4", "cg-doc5"]
+ assert found == expected
+
+
+def test_partition_query(client, cleanup):
+ collection_group = "b" + UNIQUE_RESOURCE_ID
+ n_docs = 128 * 2 + 127 # Minimum partition size is 128
+ parents = itertools.cycle(("", "abc/123/", "def/456/", "ghi/789/"))
+ batch = client.batch()
+ cleanup_batch = client.batch()
+ cleanup(cleanup_batch.commit)
+ expected = []
+ for i, parent in zip(range(n_docs), parents):
+ doc_path = parent + collection_group + f"/cg-doc{i:03d}"
+ doc_ref = client.document(doc_path)
+ batch.set(doc_ref, {"x": i})
+ cleanup_batch.delete(doc_ref)
+ expected.append(doc_path)
+
+ batch.commit()
+
+ query = client.collection_group(collection_group)
+ partitions = list(query.get_partitions(3))
+ streams = [partition.query().stream() for partition in partitions]
+ snapshots = itertools.chain(*streams)
+ found = [snapshot.reference.path for snapshot in snapshots]
+ expected.sort()
+ assert found == expected
+
+
+@pytest.mark.skipif(FIRESTORE_EMULATOR, reason="Internal Issue b/137865992")
def test_get_all(client, cleanup):
collection_name = "get-all" + UNIQUE_RESOURCE_ID
@@ -940,7 +1058,7 @@ def test_batch(client, cleanup):
write_result1 = write_results[0]
write_result2 = write_results[1]
write_result3 = write_results[2]
- assert not write_result3.HasField("update_time")
+ assert not write_result3._pb.HasField("update_time")
snapshot1 = document1.get()
assert snapshot1.to_dict() == data1
@@ -959,11 +1077,11 @@ def test_batch(client, cleanup):
def test_watch_document(client, cleanup):
db = client
- collection_ref = db.collection(u"wd-users" + UNIQUE_RESOURCE_ID)
- doc_ref = collection_ref.document(u"alovelace")
+ collection_ref = db.collection("wd-users" + UNIQUE_RESOURCE_ID)
+ doc_ref = collection_ref.document("alovelace")
# Initial setting
- doc_ref.set({u"first": u"Jane", u"last": u"Doe", u"born": 1900})
+ doc_ref.set({"first": "Jane", "last": "Doe", "born": 1900})
cleanup(doc_ref.delete)
sleep(1)
@@ -977,7 +1095,7 @@ def on_snapshot(docs, changes, read_time):
doc_ref.on_snapshot(on_snapshot)
# Alter document
- doc_ref.set({u"first": u"Ada", u"last": u"Lovelace", u"born": 1815})
+ doc_ref.set({"first": "Ada", "last": "Lovelace", "born": 1815})
sleep(1)
@@ -995,11 +1113,11 @@ def on_snapshot(docs, changes, read_time):
def test_watch_collection(client, cleanup):
db = client
- collection_ref = db.collection(u"wc-users" + UNIQUE_RESOURCE_ID)
- doc_ref = collection_ref.document(u"alovelace")
+ collection_ref = db.collection("wc-users" + UNIQUE_RESOURCE_ID)
+ doc_ref = collection_ref.document("alovelace")
# Initial setting
- doc_ref.set({u"first": u"Jane", u"last": u"Doe", u"born": 1900})
+ doc_ref.set({"first": "Jane", "last": "Doe", "born": 1900})
cleanup(doc_ref.delete)
# Setup listener
@@ -1016,7 +1134,7 @@ def on_snapshot(docs, changes, read_time):
# delay here so initial on_snapshot occurs and isn't combined with set
sleep(1)
- doc_ref.set({u"first": u"Ada", u"last": u"Lovelace", u"born": 1815})
+ doc_ref.set({"first": "Ada", "last": "Lovelace", "born": 1815})
for _ in range(10):
if on_snapshot.born == 1815:
@@ -1031,12 +1149,12 @@ def on_snapshot(docs, changes, read_time):
def test_watch_query(client, cleanup):
db = client
- collection_ref = db.collection(u"wq-users" + UNIQUE_RESOURCE_ID)
- doc_ref = collection_ref.document(u"alovelace")
- query_ref = collection_ref.where("first", "==", u"Ada")
+ collection_ref = db.collection("wq-users" + UNIQUE_RESOURCE_ID)
+ doc_ref = collection_ref.document("alovelace")
+ query_ref = collection_ref.where("first", "==", "Ada")
# Initial setting
- doc_ref.set({u"first": u"Jane", u"last": u"Doe", u"born": 1900})
+ doc_ref.set({"first": "Jane", "last": "Doe", "born": 1900})
cleanup(doc_ref.delete)
sleep(1)
@@ -1046,7 +1164,7 @@ def on_snapshot(docs, changes, read_time):
on_snapshot.called_count += 1
# A snapshot should return the same thing as if a query ran now.
- query_ran = collection_ref.where("first", "==", u"Ada").stream()
+ query_ran = collection_ref.where("first", "==", "Ada").stream()
assert len(docs) == len([i for i in query_ran])
on_snapshot.called_count = 0
@@ -1054,7 +1172,7 @@ def on_snapshot(docs, changes, read_time):
query_ref.on_snapshot(on_snapshot)
# Alter document
- doc_ref.set({u"first": u"Ada", u"last": u"Lovelace", u"born": 1815})
+ doc_ref.set({"first": "Ada", "last": "Lovelace", "born": 1815})
for _ in range(10):
if on_snapshot.called_count == 1:
@@ -1070,14 +1188,14 @@ def on_snapshot(docs, changes, read_time):
def test_watch_query_order(client, cleanup):
db = client
- collection_ref = db.collection(u"users")
- doc_ref1 = collection_ref.document(u"alovelace" + UNIQUE_RESOURCE_ID)
- doc_ref2 = collection_ref.document(u"asecondlovelace" + UNIQUE_RESOURCE_ID)
- doc_ref3 = collection_ref.document(u"athirdlovelace" + UNIQUE_RESOURCE_ID)
- doc_ref4 = collection_ref.document(u"afourthlovelace" + UNIQUE_RESOURCE_ID)
- doc_ref5 = collection_ref.document(u"afifthlovelace" + UNIQUE_RESOURCE_ID)
+ collection_ref = db.collection("users")
+ doc_ref1 = collection_ref.document("alovelace" + UNIQUE_RESOURCE_ID)
+ doc_ref2 = collection_ref.document("asecondlovelace" + UNIQUE_RESOURCE_ID)
+ doc_ref3 = collection_ref.document("athirdlovelace" + UNIQUE_RESOURCE_ID)
+ doc_ref4 = collection_ref.document("afourthlovelace" + UNIQUE_RESOURCE_ID)
+ doc_ref5 = collection_ref.document("afifthlovelace" + UNIQUE_RESOURCE_ID)
- query_ref = collection_ref.where("first", "==", u"Ada").order_by("last")
+ query_ref = collection_ref.where("first", "==", "Ada").order_by("last")
# Setup listener
def on_snapshot(docs, changes, read_time):
@@ -1109,19 +1227,19 @@ def on_snapshot(docs, changes, read_time):
sleep(1)
- doc_ref1.set({u"first": u"Ada", u"last": u"Lovelace", u"born": 1815})
+ doc_ref1.set({"first": "Ada", "last": "Lovelace", "born": 1815})
cleanup(doc_ref1.delete)
- doc_ref2.set({u"first": u"Ada", u"last": u"SecondLovelace", u"born": 1815})
+ doc_ref2.set({"first": "Ada", "last": "SecondLovelace", "born": 1815})
cleanup(doc_ref2.delete)
- doc_ref3.set({u"first": u"Ada", u"last": u"ThirdLovelace", u"born": 1815})
+ doc_ref3.set({"first": "Ada", "last": "ThirdLovelace", "born": 1815})
cleanup(doc_ref3.delete)
- doc_ref4.set({u"first": u"Ada", u"last": u"FourthLovelace", u"born": 1815})
+ doc_ref4.set({"first": "Ada", "last": "FourthLovelace", "born": 1815})
cleanup(doc_ref4.delete)
- doc_ref5.set({u"first": u"Ada", u"last": u"lovelace", u"born": 1815})
+ doc_ref5.set({"first": "Ada", "last": "lovelace", "born": 1815})
cleanup(doc_ref5.delete)
for _ in range(10):
diff --git a/tests/system/test_system_async.py b/tests/system/test_system_async.py
new file mode 100644
index 0000000000..65a46d9841
--- /dev/null
+++ b/tests/system/test_system_async.py
@@ -0,0 +1,1078 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+import datetime
+import itertools
+import math
+import pytest
+import operator
+
+from google.oauth2 import service_account
+
+from google.api_core.exceptions import AlreadyExists
+from google.api_core.exceptions import FailedPrecondition
+from google.api_core.exceptions import InvalidArgument
+from google.api_core.exceptions import NotFound
+from google.cloud._helpers import _datetime_to_pb_timestamp
+from google.cloud._helpers import UTC
+from google.cloud import firestore_v1 as firestore
+
+from tests.system.test__helpers import (
+ FIRESTORE_CREDS,
+ FIRESTORE_PROJECT,
+ RANDOM_ID_REGEX,
+ MISSING_DOCUMENT,
+ UNIQUE_RESOURCE_ID,
+ EMULATOR_CREDS,
+ FIRESTORE_EMULATOR,
+)
+
+_test_event_loop = asyncio.new_event_loop()
+pytestmark = pytest.mark.asyncio
+
+
+def _get_credentials_and_project():
+ if FIRESTORE_EMULATOR:
+ credentials = EMULATOR_CREDS
+ project = FIRESTORE_PROJECT
+ else:
+ credentials = service_account.Credentials.from_service_account_file(
+ FIRESTORE_CREDS
+ )
+ project = FIRESTORE_PROJECT or credentials.project_id
+ return credentials, project
+
+
+@pytest.fixture(scope="module")
+def client():
+ credentials, project = _get_credentials_and_project()
+ yield firestore.AsyncClient(project=project, credentials=credentials)
+
+
+@pytest.fixture
+async def cleanup():
+ operations = []
+ yield operations.append
+
+ for operation in operations:
+ await operation()
+
+
+@pytest.fixture
+def event_loop():
+ asyncio.set_event_loop(_test_event_loop)
+ return asyncio.get_event_loop()
+
+
+async def test_collections(client):
+ collections = [x async for x in client.collections()]
+ assert isinstance(collections, list)
+
+
+async def test_collections_w_import():
+ from google.cloud import firestore
+
+ credentials, project = _get_credentials_and_project()
+ client = firestore.AsyncClient(project=project, credentials=credentials)
+ collections = [x async for x in client.collections()]
+
+ assert isinstance(collections, list)
+
+
+async def test_create_document(client, cleanup):
+ now = datetime.datetime.utcnow().replace(tzinfo=UTC)
+ collection_id = "doc-create" + UNIQUE_RESOURCE_ID
+ document_id = "doc" + UNIQUE_RESOURCE_ID
+ document = client.document(collection_id, document_id)
+ # Add to clean-up before API request (in case ``create()`` fails).
+ cleanup(document.delete)
+
+ data = {
+ "now": firestore.SERVER_TIMESTAMP,
+ "eenta-ger": 11,
+ "bites": b"\xe2\x98\x83 \xe2\x9b\xb5",
+ "also": {"nestednow": firestore.SERVER_TIMESTAMP, "quarter": 0.25},
+ }
+ write_result = await document.create(data)
+
+ updated = write_result.update_time
+ delta = updated - now
+ # Allow a bit of clock skew, but make sure timestamps are close.
+ assert -300.0 < delta.total_seconds() < 300.0
+
+ with pytest.raises(AlreadyExists):
+ await document.create(data)
+
+ # Verify the server times.
+ snapshot = await document.get()
+ stored_data = snapshot.to_dict()
+ server_now = stored_data["now"]
+
+ delta = updated - server_now
+ # NOTE: We could check the ``transform_results`` from the write result
+ # for the document transform, but this value gets dropped. Instead
+ # we make sure the timestamps are close.
+    # TODO(microgen): this was 0.0 - 5.0 before. After microgen, this started
+    # getting very small negative times.
+ assert -0.2 <= delta.total_seconds() < 5.0
+ expected_data = {
+ "now": server_now,
+ "eenta-ger": data["eenta-ger"],
+ "bites": data["bites"],
+ "also": {"nestednow": server_now, "quarter": data["also"]["quarter"]},
+ }
+ assert stored_data == expected_data
+
+
+async def test_create_document_w_subcollection(client, cleanup):
+ collection_id = "doc-create-sub" + UNIQUE_RESOURCE_ID
+ document_id = "doc" + UNIQUE_RESOURCE_ID
+ document = client.document(collection_id, document_id)
+ # Add to clean-up before API request (in case ``create()`` fails).
+ cleanup(document.delete)
+
+ data = {"now": firestore.SERVER_TIMESTAMP}
+ await document.create(data)
+
+ child_ids = ["child1", "child2"]
+
+ for child_id in child_ids:
+ subcollection = document.collection(child_id)
+ _, subdoc = await subcollection.add({"foo": "bar"})
+ cleanup(subdoc.delete)
+
+ children = document.collections()
+ assert sorted([child.id async for child in children]) == sorted(child_ids)
+
+
+@pytest.mark.skipif(FIRESTORE_EMULATOR, reason="Internal Issue b/137866686")
+async def test_cannot_use_foreign_key(client, cleanup):
+ document_id = "cannot" + UNIQUE_RESOURCE_ID
+ document = client.document("foreign-key", document_id)
+ # Add to clean-up before API request (in case ``create()`` fails).
+ cleanup(document.delete)
+
+ other_client = firestore.Client(
+ project="other-prahj", credentials=client._credentials, database="dee-bee"
+ )
+ assert other_client._database_string != client._database_string
+ fake_doc = other_client.document("foo", "bar")
+ with pytest.raises(InvalidArgument):
+ await document.create({"ref": fake_doc})
+
+
+def assert_timestamp_less(timestamp_pb1, timestamp_pb2):
+ assert timestamp_pb1 < timestamp_pb2
+
+
+async def test_no_document(client):
+ document_id = "no_document" + UNIQUE_RESOURCE_ID
+ document = client.document("abcde", document_id)
+ snapshot = await document.get()
+ assert snapshot.to_dict() is None
+
+
+async def test_document_set(client, cleanup):
+ document_id = "for-set" + UNIQUE_RESOURCE_ID
+ document = client.document("i-did-it", document_id)
+ # Add to clean-up before API request (in case ``set()`` fails).
+ cleanup(document.delete)
+
+ # 0. Make sure the document doesn't exist yet
+ snapshot = await document.get()
+ assert snapshot.to_dict() is None
+
+ # 1. Use ``create()`` to create the document.
+ data1 = {"foo": 88}
+ write_result1 = await document.create(data1)
+ snapshot1 = await document.get()
+ assert snapshot1.to_dict() == data1
+ # Make sure the update is what created the document.
+ assert snapshot1.create_time == snapshot1.update_time
+ assert snapshot1.update_time == write_result1.update_time
+
+ # 2. Call ``set()`` again to overwrite.
+ data2 = {"bar": None}
+ write_result2 = await document.set(data2)
+ snapshot2 = await document.get()
+ assert snapshot2.to_dict() == data2
+ # Make sure the create time hasn't changed.
+ assert snapshot2.create_time == snapshot1.create_time
+ assert snapshot2.update_time == write_result2.update_time
+
+
+async def test_document_integer_field(client, cleanup):
+ document_id = "for-set" + UNIQUE_RESOURCE_ID
+ document = client.document("i-did-it", document_id)
+ # Add to clean-up before API request (in case ``set()`` fails).
+ cleanup(document.delete)
+
+ data1 = {"1a": {"2b": "3c", "ab": "5e"}, "6f": {"7g": "8h", "cd": "0j"}}
+ await document.create(data1)
+
+ data2 = {"1a.ab": "4d", "6f.7g": "9h"}
+ await document.update(data2)
+ snapshot = await document.get()
+ expected = {"1a": {"2b": "3c", "ab": "4d"}, "6f": {"7g": "9h", "cd": "0j"}}
+ assert snapshot.to_dict() == expected
+
+
+async def test_document_set_merge(client, cleanup):
+ document_id = "for-set" + UNIQUE_RESOURCE_ID
+ document = client.document("i-did-it", document_id)
+ # Add to clean-up before API request (in case ``set()`` fails).
+ cleanup(document.delete)
+
+ # 0. Make sure the document doesn't exist yet
+ snapshot = await document.get()
+ assert not snapshot.exists
+
+ # 1. Use ``create()`` to create the document.
+ data1 = {"name": "Sam", "address": {"city": "SF", "state": "CA"}}
+ write_result1 = await document.create(data1)
+ snapshot1 = await document.get()
+ assert snapshot1.to_dict() == data1
+ # Make sure the update is what created the document.
+ assert snapshot1.create_time == snapshot1.update_time
+ assert snapshot1.update_time == write_result1.update_time
+
+ # 2. Call ``set()`` to merge
+ data2 = {"address": {"city": "LA"}}
+ write_result2 = await document.set(data2, merge=True)
+ snapshot2 = await document.get()
+ assert snapshot2.to_dict() == {
+ "name": "Sam",
+ "address": {"city": "LA", "state": "CA"},
+ }
+ # Make sure the create time hasn't changed.
+ assert snapshot2.create_time == snapshot1.create_time
+ assert snapshot2.update_time == write_result2.update_time
+
+
+async def test_document_set_w_int_field(client, cleanup):
+ document_id = "set-int-key" + UNIQUE_RESOURCE_ID
+ document = client.document("i-did-it", document_id)
+ # Add to clean-up before API request (in case ``set()`` fails).
+ cleanup(document.delete)
+
+ # 0. Make sure the document doesn't exist yet
+ snapshot = await document.get()
+ assert not snapshot.exists
+
+ # 1. Use ``create()`` to create the document.
+ before = {"testing": "1"}
+ await document.create(before)
+
+ # 2. Replace using ``set()``.
+ data = {"14": {"status": "active"}}
+ await document.set(data)
+
+ # 3. Verify replaced data.
+ snapshot1 = await document.get()
+ assert snapshot1.to_dict() == data
+
+
+async def test_document_update_w_int_field(client, cleanup):
+ # Attempt to reproduce #5489.
+ document_id = "update-int-key" + UNIQUE_RESOURCE_ID
+ document = client.document("i-did-it", document_id)
+ # Add to clean-up before API request (in case ``set()`` fails).
+ cleanup(document.delete)
+
+ # 0. Make sure the document doesn't exist yet
+ snapshot = await document.get()
+ assert not snapshot.exists
+
+ # 1. Use ``create()`` to create the document.
+ before = {"testing": "1"}
+ await document.create(before)
+
+ # 2. Add values using ``update()``.
+ data = {"14": {"status": "active"}}
+ await document.update(data)
+
+ # 3. Verify updated data.
+ expected = before.copy()
+ expected.update(data)
+ snapshot1 = await document.get()
+ assert snapshot1.to_dict() == expected
+
+
+@pytest.mark.skipif(FIRESTORE_EMULATOR, reason="Internal Issue b/137867104")
+async def test_update_document(client, cleanup):
+ document_id = "for-update" + UNIQUE_RESOURCE_ID
+ document = client.document("made", document_id)
+ # Add to clean-up before API request (in case ``create()`` fails).
+ cleanup(document.delete)
+
+ # 0. Try to update before the document exists.
+ with pytest.raises(NotFound) as exc_info:
+ await document.update({"not": "there"})
+ assert exc_info.value.message.startswith(MISSING_DOCUMENT)
+ assert document_id in exc_info.value.message
+
+ # 1. Try to update before the document exists (now with an option).
+ with pytest.raises(NotFound) as exc_info:
+ await document.update({"still": "not-there"})
+ assert exc_info.value.message.startswith(MISSING_DOCUMENT)
+ assert document_id in exc_info.value.message
+
+ # 2. Update and create the document (with an option).
+ data = {"foo": {"bar": "baz"}, "scoop": {"barn": 981}, "other": True}
+ write_result2 = await document.create(data)
+
+ # 3. Send an update without a field path (no option).
+ field_updates3 = {"foo": {"quux": 800}}
+ write_result3 = await document.update(field_updates3)
+ assert_timestamp_less(write_result2.update_time, write_result3.update_time)
+ snapshot3 = await document.get()
+ expected3 = {
+ "foo": field_updates3["foo"],
+ "scoop": data["scoop"],
+ "other": data["other"],
+ }
+ assert snapshot3.to_dict() == expected3
+
+ # 4. Send an update **with** a field path and a delete and a valid
+ # "last timestamp" option.
+ field_updates4 = {"scoop.silo": None, "other": firestore.DELETE_FIELD}
+ option4 = client.write_option(last_update_time=snapshot3.update_time)
+ write_result4 = await document.update(field_updates4, option=option4)
+ assert_timestamp_less(write_result3.update_time, write_result4.update_time)
+ snapshot4 = await document.get()
+ expected4 = {
+ "foo": field_updates3["foo"],
+ "scoop": {"barn": data["scoop"]["barn"], "silo": field_updates4["scoop.silo"]},
+ }
+ assert snapshot4.to_dict() == expected4
+
+ # 5. Call ``update()`` with invalid (in the past) "last timestamp" option.
+ assert_timestamp_less(option4._last_update_time, snapshot4.update_time)
+ with pytest.raises(FailedPrecondition) as exc_info:
+ await document.update({"bad": "time-past"}, option=option4)
+
+ # 6. Call ``update()`` with invalid (in future) "last timestamp" option.
+ # TODO(microgen): start using custom datetime with nanos in protoplus?
+ timestamp_pb = _datetime_to_pb_timestamp(snapshot4.update_time)
+ timestamp_pb.seconds += 3600
+
+ option6 = client.write_option(last_update_time=timestamp_pb)
+    # TODO(microgen): invalid argument thrown after microgen.
+ # with pytest.raises(FailedPrecondition) as exc_info:
+ with pytest.raises(InvalidArgument) as exc_info:
+ await document.update({"bad": "time-future"}, option=option6)
+
+
+def check_snapshot(snapshot, document, data, write_result):
+ assert snapshot.reference is document
+ assert snapshot.to_dict() == data
+ assert snapshot.exists
+ assert snapshot.create_time == write_result.update_time
+ assert snapshot.update_time == write_result.update_time
+
+
+async def test_document_get(client, cleanup):
+ now = datetime.datetime.utcnow().replace(tzinfo=UTC)
+ document_id = "for-get" + UNIQUE_RESOURCE_ID
+ document = client.document("created", document_id)
+ # Add to clean-up before API request (in case ``create()`` fails).
+ cleanup(document.delete)
+
+ # First make sure it doesn't exist.
+ assert not (await document.get()).exists
+
+ ref_doc = client.document("top", "middle1", "middle2", "bottom")
+ data = {
+ "turtle": "power",
+ "cheese": 19.5,
+ "fire": 199099299,
+ "referee": ref_doc,
+ "gio": firestore.GeoPoint(45.5, 90.0),
+ "deep": ["some", b"\xde\xad\xbe\xef"],
+ "map": {"ice": True, "water": None, "vapor": {"deeper": now}},
+ }
+ write_result = await document.create(data)
+ snapshot = await document.get()
+ check_snapshot(snapshot, document, data, write_result)
+
+
+async def test_document_delete(client, cleanup):
+ document_id = "deleted" + UNIQUE_RESOURCE_ID
+ document = client.document("here-to-be", document_id)
+ # Add to clean-up before API request (in case ``create()`` fails).
+ cleanup(document.delete)
+ await document.create({"not": "much"})
+
+ # 1. Call ``delete()`` with invalid (in the past) "last timestamp" option.
+ snapshot1 = await document.get()
+ timestamp_pb = _datetime_to_pb_timestamp(snapshot1.update_time)
+ timestamp_pb.seconds += 3600
+
+ option1 = client.write_option(last_update_time=timestamp_pb)
+    # TODO(microgen): invalid argument thrown after microgen.
+ # with pytest.raises(FailedPrecondition):
+ with pytest.raises(InvalidArgument):
+ await document.delete(option=option1)
+
+ # 2. Call ``delete()`` with invalid (in future) "last timestamp" option.
+ timestamp_pb = _datetime_to_pb_timestamp(snapshot1.update_time)
+ timestamp_pb.seconds += 3600
+
+ option2 = client.write_option(last_update_time=timestamp_pb)
+    # TODO(microgen): invalid argument thrown after microgen.
+ # with pytest.raises(FailedPrecondition):
+ with pytest.raises(InvalidArgument):
+ await document.delete(option=option2)
+
+ # 3. Actually ``delete()`` the document.
+ delete_time3 = await document.delete()
+
+ # 4. ``delete()`` again, even though we know the document is gone.
+ delete_time4 = await document.delete()
+ assert_timestamp_less(delete_time3, delete_time4)
+
+
+async def test_collection_add(client, cleanup):
+ # TODO(microgen): list_documents is returning a generator, not a list.
+ # Consider if this is desired. Also, Document isn't hashable.
+ collection_id = "coll-add" + UNIQUE_RESOURCE_ID
+ collection1 = client.collection(collection_id)
+ collection2 = client.collection(collection_id, "doc", "child")
+ collection3 = client.collection(collection_id, "table", "child")
+ explicit_doc_id = "hula" + UNIQUE_RESOURCE_ID
+
+ assert set([i async for i in collection1.list_documents()]) == set()
+ assert set([i async for i in collection2.list_documents()]) == set()
+ assert set([i async for i in collection3.list_documents()]) == set()
+
+ # Auto-ID at top-level.
+ data1 = {"foo": "bar"}
+ update_time1, document_ref1 = await collection1.add(data1)
+ cleanup(document_ref1.delete)
+ assert set([i async for i in collection1.list_documents()]) == {document_ref1}
+ assert set([i async for i in collection2.list_documents()]) == set()
+ assert set([i async for i in collection3.list_documents()]) == set()
+ snapshot1 = await document_ref1.get()
+ assert snapshot1.to_dict() == data1
+ assert snapshot1.update_time == update_time1
+ assert RANDOM_ID_REGEX.match(document_ref1.id)
+
+ # Explicit ID at top-level.
+ data2 = {"baz": 999}
+ update_time2, document_ref2 = await collection1.add(
+ data2, document_id=explicit_doc_id
+ )
+ cleanup(document_ref2.delete)
+ assert set([i async for i in collection1.list_documents()]) == {
+ document_ref1,
+ document_ref2,
+ }
+ assert set([i async for i in collection2.list_documents()]) == set()
+ assert set([i async for i in collection3.list_documents()]) == set()
+ snapshot2 = await document_ref2.get()
+ assert snapshot2.to_dict() == data2
+ assert snapshot2.create_time == update_time2
+ assert snapshot2.update_time == update_time2
+ assert document_ref2.id == explicit_doc_id
+
+ nested_ref = collection1.document("doc")
+
+ # Auto-ID for nested collection.
+ data3 = {"quux": b"\x00\x01\x02\x03"}
+ update_time3, document_ref3 = await collection2.add(data3)
+ cleanup(document_ref3.delete)
+ assert set([i async for i in collection1.list_documents()]) == {
+ document_ref1,
+ document_ref2,
+ nested_ref,
+ }
+ assert set([i async for i in collection2.list_documents()]) == {document_ref3}
+ assert set([i async for i in collection3.list_documents()]) == set()
+ snapshot3 = await document_ref3.get()
+ assert snapshot3.to_dict() == data3
+ assert snapshot3.update_time == update_time3
+ assert RANDOM_ID_REGEX.match(document_ref3.id)
+
+ # Explicit for nested collection.
+ data4 = {"kazaam": None, "bad": False}
+ update_time4, document_ref4 = await collection2.add(
+ data4, document_id=explicit_doc_id
+ )
+ cleanup(document_ref4.delete)
+ assert set([i async for i in collection1.list_documents()]) == {
+ document_ref1,
+ document_ref2,
+ nested_ref,
+ }
+ assert set([i async for i in collection2.list_documents()]) == {
+ document_ref3,
+ document_ref4,
+ }
+ assert set([i async for i in collection3.list_documents()]) == set()
+ snapshot4 = await document_ref4.get()
+ assert snapshot4.to_dict() == data4
+ assert snapshot4.create_time == update_time4
+ assert snapshot4.update_time == update_time4
+ assert document_ref4.id == explicit_doc_id
+
+ # Exercise "missing" document (no doc, but subcollection).
+ data5 = {"bam": 123, "folyk": False}
+ update_time5, document_ref5 = await collection3.add(data5)
+ cleanup(document_ref5.delete)
+ missing_ref = collection1.document("table")
+ assert set([i async for i in collection1.list_documents()]) == {
+ document_ref1,
+ document_ref2,
+ nested_ref,
+ missing_ref,
+ }
+ assert set([i async for i in collection2.list_documents()]) == {
+ document_ref3,
+ document_ref4,
+ }
+ assert set([i async for i in collection3.list_documents()]) == {document_ref5}
+
+
+@pytest.fixture
+async def query_docs(client):
+ collection_id = "qs" + UNIQUE_RESOURCE_ID
+ sub_collection = "child" + UNIQUE_RESOURCE_ID
+ collection = client.collection(collection_id, "doc", sub_collection)
+
+ cleanup = []
+ stored = {}
+ num_vals = 5
+ allowed_vals = range(num_vals)
+ for a_val in allowed_vals:
+ for b_val in allowed_vals:
+ document_data = {
+ "a": a_val,
+ "b": b_val,
+ "c": [a_val, num_vals * 100],
+ "stats": {"sum": a_val + b_val, "product": a_val * b_val},
+ }
+ _, doc_ref = await collection.add(document_data)
+ # Add to clean-up.
+ cleanup.append(doc_ref.delete)
+ stored[doc_ref.id] = document_data
+
+ yield collection, stored, allowed_vals
+
+ for operation in cleanup:
+ await operation()
+
+
+async def test_query_stream_w_simple_field_eq_op(query_docs):
+ collection, stored, allowed_vals = query_docs
+ query = collection.where("a", "==", 1)
+ values = {snapshot.id: snapshot.to_dict() async for snapshot in query.stream()}
+ assert len(values) == len(allowed_vals)
+ for key, value in values.items():
+ assert stored[key] == value
+ assert value["a"] == 1
+
+
+async def test_query_stream_w_simple_field_array_contains_op(query_docs):
+ collection, stored, allowed_vals = query_docs
+ query = collection.where("c", "array_contains", 1)
+ values = {snapshot.id: snapshot.to_dict() async for snapshot in query.stream()}
+ assert len(values) == len(allowed_vals)
+ for key, value in values.items():
+ assert stored[key] == value
+ assert value["a"] == 1
+
+
+async def test_query_stream_w_simple_field_in_op(query_docs):
+ collection, stored, allowed_vals = query_docs
+ num_vals = len(allowed_vals)
+ query = collection.where("a", "in", [1, num_vals + 100])
+ values = {snapshot.id: snapshot.to_dict() async for snapshot in query.stream()}
+ assert len(values) == len(allowed_vals)
+ for key, value in values.items():
+ assert stored[key] == value
+ assert value["a"] == 1
+
+
+async def test_query_stream_w_simple_field_array_contains_any_op(query_docs):
+ collection, stored, allowed_vals = query_docs
+ num_vals = len(allowed_vals)
+ query = collection.where("c", "array_contains_any", [1, num_vals * 200])
+ values = {snapshot.id: snapshot.to_dict() async for snapshot in query.stream()}
+ assert len(values) == len(allowed_vals)
+ for key, value in values.items():
+ assert stored[key] == value
+ assert value["a"] == 1
+
+
+async def test_query_stream_w_order_by(query_docs):
+ collection, stored, allowed_vals = query_docs
+ query = collection.order_by("b", direction=firestore.Query.DESCENDING)
+ values = [(snapshot.id, snapshot.to_dict()) async for snapshot in query.stream()]
+ assert len(values) == len(stored)
+ b_vals = []
+ for key, value in values:
+ assert stored[key] == value
+ b_vals.append(value["b"])
+ # Make sure the ``b``-values are in DESCENDING order.
+ assert sorted(b_vals, reverse=True) == b_vals
+
+
+async def test_query_stream_w_field_path(query_docs):
+ collection, stored, allowed_vals = query_docs
+ query = collection.where("stats.sum", ">", 4)
+ values = {snapshot.id: snapshot.to_dict() async for snapshot in query.stream()}
+ assert len(values) == 10
+ ab_pairs2 = set()
+ for key, value in values.items():
+ assert stored[key] == value
+ ab_pairs2.add((value["a"], value["b"]))
+
+ expected_ab_pairs = set(
+ [
+ (a_val, b_val)
+ for a_val in allowed_vals
+ for b_val in allowed_vals
+ if a_val + b_val > 4
+ ]
+ )
+ assert expected_ab_pairs == ab_pairs2
+
+
+async def test_query_stream_w_start_end_cursor(query_docs):
+ collection, stored, allowed_vals = query_docs
+ num_vals = len(allowed_vals)
+ query = (
+ collection.order_by("a")
+ .start_at({"a": num_vals - 2})
+ .end_before({"a": num_vals - 1})
+ )
+ values = [(snapshot.id, snapshot.to_dict()) async for snapshot in query.stream()]
+ assert len(values) == num_vals
+ for key, value in values:
+ assert stored[key] == value
+ assert value["a"] == num_vals - 2
+
+
+async def test_query_stream_wo_results(query_docs):
+ collection, stored, allowed_vals = query_docs
+ num_vals = len(allowed_vals)
+ query = collection.where("b", "==", num_vals + 100)
+ values = [i async for i in query.stream()]
+ assert len(values) == 0
+
+
+async def test_query_stream_w_projection(query_docs):
+ collection, stored, allowed_vals = query_docs
+ num_vals = len(allowed_vals)
+ query = collection.where("b", "<=", 1).select(["a", "stats.product"])
+ values = {snapshot.id: snapshot.to_dict() async for snapshot in query.stream()}
+ assert len(values) == num_vals * 2 # a ANY, b in (0, 1)
+ for key, value in values.items():
+ expected = {
+ "a": stored[key]["a"],
+ "stats": {"product": stored[key]["stats"]["product"]},
+ }
+ assert expected == value
+
+
async def test_query_stream_w_multiple_filters(query_docs):
    """Chained ``where()`` clauses are combined with AND semantics."""
    collection, stored, allowed_vals = query_docs
    query = collection.where("stats.product", ">", 5).where("stats.product", "<", 10)
    values = {snapshot.id: snapshot.to_dict() async for snapshot in query.stream()}

    # Enumerate the (a, b) combinations whose product falls in the open
    # interval (5, 10); those are exactly the docs the query should return.
    matching_pairs = []
    for a_val in allowed_vals:
        for b_val in allowed_vals:
            if 5 < a_val * b_val < 10:
                matching_pairs.append((a_val, b_val))

    assert len(values) == len(matching_pairs)
    for key, value in values.items():
        assert stored[key] == value
        assert (value["a"], value["b"]) in matching_pairs
+
+
async def test_query_stream_w_offset(query_docs):
    """``offset()`` skips the first N results of a filtered query."""
    collection, stored, allowed_vals = query_docs
    num_vals = len(allowed_vals)
    offset = 3
    query = collection.where("b", "==", 2).offset(offset)
    values = {}
    async for snapshot in query.stream():
        values[snapshot.id] = snapshot.to_dict()
    # NOTE: We don't check the ``a``-values, since that would require
    # an ``order_by('a')``, which combined with the ``b == 2``
    # filter would necessitate an index.
    assert len(values) == num_vals - offset
    for key, value in values.items():
        assert stored[key] == value
        assert value["b"] == 2
+
+
async def test_query_with_order_dot_key(client, cleanup):
    """order_by/limit and start_after cursors work with dotted field paths."""
    db = client
    collection_id = "collek" + UNIQUE_RESOURCE_ID
    collection = db.collection(collection_id)
    # Seed docs in reverse index order; "wordcount.page1" is 100..1100.
    for index in range(100, -1, -1):
        doc = collection.document("test_{:09d}".format(index))
        data = {"count": 10 * index, "wordcount": {"page1": index * 10 + 100}}
        await doc.set(data)
        cleanup(doc.delete)
    query = collection.order_by("wordcount.page1").limit(3)
    data = [doc.to_dict()["wordcount"]["page1"] async for doc in query.stream()]
    assert [100, 110, 120] == data
    # After this loop ``last_value`` holds the third-smallest page1 value (120,
    # per the assertion above).
    async for snapshot in collection.order_by("wordcount.page1").limit(3).stream():
        last_value = snapshot.get("wordcount.page1")
    # Cursor expressed as nested dicts...
    cursor_with_nested_keys = {"wordcount": {"page1": last_value}}
    found = [
        i
        async for i in collection.order_by("wordcount.page1")
        .start_after(cursor_with_nested_keys)
        .limit(3)
        .stream()
    ]
    found_data = [
        {"count": 30, "wordcount": {"page1": 130}},
        {"count": 40, "wordcount": {"page1": 140}},
        {"count": 50, "wordcount": {"page1": 150}},
    ]
    assert found_data == [snap.to_dict() for snap in found]
    # ...and the same cursor expressed with a dotted path must behave identically.
    cursor_with_dotted_paths = {"wordcount.page1": last_value}
    cursor_with_key_data = [
        i
        async for i in collection.order_by("wordcount.page1")
        .start_after(cursor_with_dotted_paths)
        .limit(3)
        .stream()
    ]
    assert found_data == [snap.to_dict() for snap in cursor_with_key_data]
+
+
async def test_query_unary(client, cleanup):
    """Unary equality filters: ``== None`` and ``== NaN`` each match one doc."""
    collection_name = "unary" + UNIQUE_RESOURCE_ID
    collection = client.collection(collection_name)
    field_name = "foo"

    _, document0 = await collection.add({field_name: None})
    # Add to clean-up.
    cleanup(document0.delete)

    nan_val = float("nan")
    _, document1 = await collection.add({field_name: nan_val})
    # Add to clean-up.
    cleanup(document1.delete)

    # 0. Query for null.
    null_query = collection.where(field_name, "==", None)
    null_snapshots = [snap async for snap in null_query.stream()]
    assert len(null_snapshots) == 1
    null_snapshot = null_snapshots[0]
    assert null_snapshot.reference._path == document0._path
    assert null_snapshot.to_dict() == {field_name: None}

    # 1. Query for a NAN.
    nan_query = collection.where(field_name, "==", nan_val)
    nan_snapshots = [snap async for snap in nan_query.stream()]
    assert len(nan_snapshots) == 1
    nan_snapshot = nan_snapshots[0]
    assert nan_snapshot.reference._path == document1._path
    nan_data = nan_snapshot.to_dict()
    assert len(nan_data) == 1
    assert math.isnan(nan_data[field_name])
+
+
async def test_collection_group_queries(client, cleanup):
    """A collection-group query finds docs under every collection with that id."""
    collection_group = "b" + UNIQUE_RESOURCE_ID

    # The "not-cg-doc" paths deliberately do NOT belong to the group: wrong
    # collection id, prefixed/suffixed id, or nested under a virtual doc.
    doc_paths = [
        "abc/123/" + collection_group + "/cg-doc1",
        "abc/123/" + collection_group + "/cg-doc2",
        collection_group + "/cg-doc3",
        collection_group + "/cg-doc4",
        "def/456/" + collection_group + "/cg-doc5",
        collection_group + "/virtual-doc/nested-coll/not-cg-doc",
        "x" + collection_group + "/not-cg-doc",
        collection_group + "x/not-cg-doc",
        "abc/123/" + collection_group + "x/not-cg-doc",
        "abc/123/x" + collection_group + "/not-cg-doc",
        "abc/" + collection_group,
    ]

    batch = client.batch()
    for path in doc_paths:
        ref = client.document(path)
        batch.set(ref, {"x": 1})
        cleanup(ref.delete)
    await batch.commit()

    query = client.collection_group(collection_group)
    found = [snapshot.id async for snapshot in query.stream()]
    assert found == ["cg-doc1", "cg-doc2", "cg-doc3", "cg-doc4", "cg-doc5"]
+
+
async def test_collection_group_queries_startat_endat(client, cleanup):
    """Cursors on ``__name__`` bound a collection-group query by document path."""
    collection_group = "b" + UNIQUE_RESOURCE_ID

    doc_paths = [
        "a/a/" + collection_group + "/cg-doc1",
        "a/b/a/b/" + collection_group + "/cg-doc2",
        "a/b/" + collection_group + "/cg-doc3",
        "a/b/c/d/" + collection_group + "/cg-doc4",
        "a/c/" + collection_group + "/cg-doc5",
        collection_group + "/cg-doc6",
        "a/b/nope/nope",
    ]

    batch = client.batch()
    for path in doc_paths:
        ref = client.document(path)
        batch.set(ref, {"x": path})
        cleanup(ref.delete)
    await batch.commit()

    # Inclusive bounds: everything whose path sorts within ["a/b", "a/b0"].
    query = (
        client.collection_group(collection_group)
        .order_by("__name__")
        .start_at([client.document("a/b")])
        .end_at([client.document("a/b0")])
    )
    found = {snapshot.id async for snapshot in query.stream()}
    assert found == {"cg-doc2", "cg-doc3", "cg-doc4"}

    # Exclusive bounds trim the endpoints down to a single doc.
    query = (
        client.collection_group(collection_group)
        .order_by("__name__")
        .start_after([client.document("a/b")])
        .end_before([client.document("a/b/" + collection_group + "/cg-doc3")])
    )
    found = {snapshot.id async for snapshot in query.stream()}
    assert found == {"cg-doc2"}
+
+
async def test_collection_group_queries_filters(client, cleanup):
    """``__name__`` range filters via where() bound a collection-group query."""
    collection_group = "b" + UNIQUE_RESOURCE_ID

    doc_paths = [
        "a/a/" + collection_group + "/cg-doc1",
        "a/b/a/b/" + collection_group + "/cg-doc2",
        "a/b/" + collection_group + "/cg-doc3",
        "a/b/c/d/" + collection_group + "/cg-doc4",
        "a/c/" + collection_group + "/cg-doc5",
        collection_group + "/cg-doc6",
        "a/b/nope/nope",
    ]

    batch = client.batch()
    for index, path in enumerate(doc_paths):
        ref = client.document(path)
        batch.set(ref, {"x": index})
        cleanup(ref.delete)
    await batch.commit()

    # The special "__name__" field path lets where() filter on document paths.
    doc_id = firestore.field_path.FieldPath.document_id()

    query = (
        client.collection_group(collection_group)
        .where(doc_id, ">=", client.document("a/b"))
        .where(doc_id, "<=", client.document("a/b0"))
    )
    found = {snapshot.id async for snapshot in query.stream()}
    assert found == {"cg-doc2", "cg-doc3", "cg-doc4"}

    query = (
        client.collection_group(collection_group)
        .where(doc_id, ">", client.document("a/b"))
        .where(
            doc_id,
            "<",
            client.document("a/b/{}/cg-doc3".format(collection_group)),
        )
    )
    found = {snapshot.id async for snapshot in query.stream()}
    assert found == {"cg-doc2"}
+
+
async def test_partition_query_no_partitions(client, cleanup):
    """Partitioning a tiny collection group still covers every document."""
    collection_group = "b" + UNIQUE_RESOURCE_ID

    # less than minimum partition size
    doc_paths = [
        "abc/123/" + collection_group + "/cg-doc1",
        "abc/123/" + collection_group + "/cg-doc2",
        collection_group + "/cg-doc3",
        collection_group + "/cg-doc4",
        "def/456/" + collection_group + "/cg-doc5",
    ]

    batch = client.batch()
    cleanup_batch = client.batch()
    cleanup(cleanup_batch.commit)
    for path in doc_paths:
        ref = client.document(path)
        batch.set(ref, {"x": 1})
        cleanup_batch.delete(ref)
    await batch.commit()

    query = client.collection_group(collection_group)
    partitions = [partition async for partition in query.get_partitions(3)]
    streams = [partition.query().stream() for partition in partitions]
    found = [snapshot.id async for snapshot in _chain(*streams)]
    assert found == ["cg-doc1", "cg-doc2", "cg-doc3", "cg-doc4", "cg-doc5"]
+
+
async def test_partition_query(client, cleanup):
    """Partitions of a large collection group jointly cover all documents."""
    collection_group = "b" + UNIQUE_RESOURCE_ID
    n_docs = 128 * 2 + 127  # Minimum partition size is 128
    parents = itertools.cycle(("", "abc/123/", "def/456/", "ghi/789/"))
    batch = client.batch()
    cleanup_batch = client.batch()
    cleanup(cleanup_batch.commit)
    expected = []
    for i, parent in zip(range(n_docs), parents):
        doc_path = parent + collection_group + f"/cg-doc{i:03d}"
        ref = client.document(doc_path)
        batch.set(ref, {"x": i})
        cleanup_batch.delete(ref)
        expected.append(doc_path)
    await batch.commit()

    query = client.collection_group(collection_group)
    partitions = [partition async for partition in query.get_partitions(3)]
    streams = [partition.query().stream() for partition in partitions]
    found = [snapshot.reference.path async for snapshot in _chain(*streams)]
    assert found == sorted(expected)
+
+
@pytest.mark.skipif(FIRESTORE_EMULATOR, reason="Internal Issue b/137865992")
async def test_get_all(client, cleanup):
    """``client.get_all`` handles existing, missing, duplicate and projected refs."""
    collection_name = "get-all" + UNIQUE_RESOURCE_ID

    document1 = client.document(collection_name, "a")
    document2 = client.document(collection_name, "b")  # never created; "missing"
    document3 = client.document(collection_name, "c")
    # Add to clean-up before API requests (in case ``create()`` fails).
    cleanup(document1.delete)
    cleanup(document3.delete)

    data1 = {"a": {"b": 2, "c": 3}, "d": 4, "e": 0}
    write_result1 = await document1.create(data1)
    data3 = {"a": {"b": 5, "c": 6}, "d": 7, "e": 100}
    write_result3 = await document3.create(data3)

    # 0. Get 3 unique documents, one of which is missing.
    snapshots = [i async for i in client.get_all([document1, document2, document3])]

    assert snapshots[0].exists
    assert snapshots[1].exists
    assert not snapshots[2].exists

    # Keep only the existing snapshots and sort by doc id ("a" < "c") so the
    # unpack below is deterministic.
    snapshots = [snapshot for snapshot in snapshots if snapshot.exists]
    id_attr = operator.attrgetter("id")
    snapshots.sort(key=id_attr)

    snapshot1, snapshot3 = snapshots
    check_snapshot(snapshot1, document1, data1, write_result1)
    check_snapshot(snapshot3, document3, data3, write_result3)

    # 1. Get 2 colliding documents.
    document1_also = client.document(collection_name, "a")
    snapshots = [i async for i in client.get_all([document1, document1_also])]

    # Distinct reference objects to the same doc collapse to one snapshot.
    assert len(snapshots) == 1
    assert document1 is not document1_also
    check_snapshot(snapshots[0], document1_also, data1, write_result1)

    # 2. Use ``field_paths`` / projection in ``get_all()``.
    snapshots = [
        i
        async for i in client.get_all([document1, document3], field_paths=["a.b", "d"])
    ]

    assert len(snapshots) == 2
    snapshots.sort(key=id_attr)

    snapshot1, snapshot3 = snapshots
    # Only the projected field paths should come back.
    restricted1 = {"a": {"b": data1["a"]["b"]}, "d": data1["d"]}
    check_snapshot(snapshot1, document1, restricted1, write_result1)
    restricted3 = {"a": {"b": data3["a"]["b"]}, "d": data3["d"]}
    check_snapshot(snapshot3, document3, restricted3, write_result3)
+
+
async def test_batch(client, cleanup):
    """A write batch applies create, update and delete in a single commit."""
    collection_name = "batch" + UNIQUE_RESOURCE_ID

    document1 = client.document(collection_name, "abc")
    document2 = client.document(collection_name, "mno")
    document3 = client.document(collection_name, "xyz")
    # Add to clean-up before API request (in case ``create()`` fails).
    cleanup(document1.delete)
    cleanup(document2.delete)
    cleanup(document3.delete)

    data2 = {"some": {"deep": "stuff", "and": "here"}, "water": 100.0}
    await document2.create(data2)
    await document3.create({"other": 19})

    batch = client.batch()
    data1 = {"all": True}
    batch.create(document1, data1)
    new_value = "there"
    batch.update(document2, {"some.and": new_value})
    batch.delete(document3)
    write_results = await batch.commit()

    # One write result per queued operation (create, update, delete).
    assert len(write_results) == 3

    write_result1 = write_results[0]
    write_result2 = write_results[1]
    write_result3 = write_results[2]
    # The delete's write result carries no ``update_time``.
    assert not write_result3._pb.HasField("update_time")

    snapshot1 = await document1.get()
    assert snapshot1.to_dict() == data1
    assert snapshot1.create_time == write_result1.update_time
    assert snapshot1.update_time == write_result1.update_time

    snapshot2 = await document2.get()
    assert snapshot2.to_dict() != data2
    # Apply the batched field update locally; the server doc should now match.
    data2["some"]["and"] = new_value
    assert snapshot2.to_dict() == data2
    assert_timestamp_less(snapshot2.create_time, write_result2.update_time)
    assert snapshot2.update_time == write_result2.update_time

    assert not (await document3.get()).exists
+
+
+async def _chain(*iterators):
+ """Asynchronous reimplementation of `itertools.chain`."""
+ for iterator in iterators:
+ async for value in iterator:
+ yield value
diff --git a/tests/unit/gapic/firestore_admin_v1/__init__.py b/tests/unit/gapic/firestore_admin_v1/__init__.py
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/tests/unit/gapic/firestore_admin_v1/__init__.py
@@ -0,0 +1 @@
+
diff --git a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py
new file mode 100644
index 0000000000..6773457e91
--- /dev/null
+++ b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py
@@ -0,0 +1,2827 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import mock
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+from google import auth
+from google.api_core import client_options
+from google.api_core import exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation_async # type: ignore
+from google.api_core import operations_v1
+from google.auth import credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.firestore_admin_v1.services.firestore_admin import (
+ FirestoreAdminAsyncClient,
+)
+from google.cloud.firestore_admin_v1.services.firestore_admin import (
+ FirestoreAdminClient,
+)
+from google.cloud.firestore_admin_v1.services.firestore_admin import pagers
+from google.cloud.firestore_admin_v1.services.firestore_admin import transports
+from google.cloud.firestore_admin_v1.types import field
+from google.cloud.firestore_admin_v1.types import field as gfa_field
+from google.cloud.firestore_admin_v1.types import firestore_admin
+from google.cloud.firestore_admin_v1.types import index
+from google.cloud.firestore_admin_v1.types import index as gfa_index
+from google.cloud.firestore_admin_v1.types import operation as gfa_operation
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+
+
def client_cert_source_callback():
    """Return dummy (certificate, key) bytes for mTLS client-cert tests."""
    cert, key = b"cert bytes", b"key bytes"
    return cert, key
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a fake googleapis host when the client defaults to localhost.

    This lets the endpoint-switching tests derive a distinct mTLS endpoint
    even when the default endpoint is a localhost address.
    """
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
+
+
def test__get_default_mtls_endpoint():
    """The mTLS endpoint is derived correctly for each flavor of hostname."""
    cases = {
        # input endpoint -> expected mTLS endpoint
        None: None,
        "example.googleapis.com": "example.mtls.googleapis.com",
        "example.mtls.googleapis.com": "example.mtls.googleapis.com",
        "example.sandbox.googleapis.com": "example.mtls.sandbox.googleapis.com",
        "example.mtls.sandbox.googleapis.com": "example.mtls.sandbox.googleapis.com",
        # Non-googleapis hosts are passed through untouched.
        "api.example.com": "api.example.com",
    }
    for endpoint, expected in cases.items():
        assert FirestoreAdminClient._get_default_mtls_endpoint(endpoint) == expected
+
+
@pytest.mark.parametrize(
    "client_class", [FirestoreAdminClient, FirestoreAdminAsyncClient]
)
def test_firestore_admin_client_from_service_account_file(client_class):
    """Clients built from a service-account file adopt the loaded credentials."""
    creds = credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client._transport._credentials == creds

        # ``from_service_account_json`` goes through the same factory.
        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client._transport._credentials == creds

        assert client._transport._host == "firestore.googleapis.com:443"
+
+
def test_firestore_admin_client_get_transport_class():
    """Both the default and the "grpc"-named transport resolve to gRPC."""
    default_transport = FirestoreAdminClient.get_transport_class()
    assert default_transport == transports.FirestoreAdminGrpcTransport

    named_transport = FirestoreAdminClient.get_transport_class("grpc")
    assert named_transport == transports.FirestoreAdminGrpcTransport
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"),
        (
            FirestoreAdminAsyncClient,
            transports.FirestoreAdminGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
@mock.patch.object(
    FirestoreAdminClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(FirestoreAdminClient),
)
@mock.patch.object(
    FirestoreAdminAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(FirestoreAdminAsyncClient),
)
def test_firestore_admin_client_client_options(
    client_class, transport_class, transport_name
):
    """client_options (endpoint, mTLS env vars, quota project) reach the transport."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(FirestoreAdminClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(FirestoreAdminClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class()
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                ssl_channel_credentials=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class()
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                ssl_channel_credentials=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class()

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            ssl_channel_credentials=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", "true"),
        (
            FirestoreAdminAsyncClient,
            transports.FirestoreAdminGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", "false"),
        (
            FirestoreAdminAsyncClient,
            transports.FirestoreAdminGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    FirestoreAdminClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(FirestoreAdminClient),
)
@mock.patch.object(
    FirestoreAdminAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(FirestoreAdminAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_firestore_admin_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """Endpoint/cert auto-switching driven by GOOGLE_API_USE_CLIENT_CERTIFICATE."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            ssl_channel_creds = mock.Mock()
            with mock.patch(
                "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
            ):
                patched.return_value = None
                client = client_class(client_options=options)

                if use_client_cert_env == "false":
                    expected_ssl_channel_creds = None
                    expected_host = client.DEFAULT_ENDPOINT
                else:
                    expected_ssl_channel_creds = ssl_channel_creds
                    expected_host = client.DEFAULT_MTLS_ENDPOINT

                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=expected_host,
                    scopes=None,
                    ssl_channel_credentials=expected_ssl_channel_creds,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
            ):
                with mock.patch(
                    "google.auth.transport.grpc.SslCredentials.is_mtls",
                    new_callable=mock.PropertyMock,
                ) as is_mtls_mock:
                    with mock.patch(
                        "google.auth.transport.grpc.SslCredentials.ssl_credentials",
                        new_callable=mock.PropertyMock,
                    ) as ssl_credentials_mock:
                        # NOTE(review): ``client.DEFAULT_ENDPOINT`` here reads the
                        # ``client`` left over from the previous section; it is a
                        # class-level attribute, so the value is the same either way.
                        if use_client_cert_env == "false":
                            is_mtls_mock.return_value = False
                            ssl_credentials_mock.return_value = None
                            expected_host = client.DEFAULT_ENDPOINT
                            expected_ssl_channel_creds = None
                        else:
                            is_mtls_mock.return_value = True
                            ssl_credentials_mock.return_value = mock.Mock()
                            expected_host = client.DEFAULT_MTLS_ENDPOINT
                            expected_ssl_channel_creds = (
                                ssl_credentials_mock.return_value
                            )

                        patched.return_value = None
                        client = client_class()
                        patched.assert_called_once_with(
                            credentials=None,
                            credentials_file=None,
                            host=expected_host,
                            scopes=None,
                            ssl_channel_credentials=expected_ssl_channel_creds,
                            quota_project_id=None,
                            client_info=transports.base.DEFAULT_CLIENT_INFO,
                        )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
            ):
                with mock.patch(
                    "google.auth.transport.grpc.SslCredentials.is_mtls",
                    new_callable=mock.PropertyMock,
                ) as is_mtls_mock:
                    is_mtls_mock.return_value = False
                    patched.return_value = None
                    client = client_class()
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=client.DEFAULT_ENDPOINT,
                        scopes=None,
                        ssl_channel_credentials=None,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                    )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"),
        (
            FirestoreAdminAsyncClient,
            transports.FirestoreAdminGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_firestore_admin_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """``scopes`` from client_options are forwarded verbatim to the transport."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"),
        (
            FirestoreAdminAsyncClient,
            transports.FirestoreAdminGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_firestore_admin_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """``credentials_file`` from client_options is forwarded to the transport."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
def test_firestore_admin_client_client_options_from_dict():
    """client_options may be a plain dict instead of a ClientOptions object."""
    with mock.patch(
        "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = FirestoreAdminClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
def test_create_index(
    transport: str = "grpc", request_type=firestore_admin.CreateIndexRequest
):
    """create_index sends the expected request and wraps the LRO response."""
    client = FirestoreAdminClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.create_index), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.create_index(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == firestore_admin.CreateIndexRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
+
+
def test_create_index_from_dict():
    # Re-run the happy-path test with a plain ``dict`` request type to
    # exercise the request-coercion path.
    test_create_index(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_create_index_async(transport: str = "grpc_asyncio"):
    """Async create_index sends the request and wraps the LRO response."""
    client = FirestoreAdminAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = firestore_admin.CreateIndexRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.create_index), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.create_index(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
+
+
def test_create_index_field_headers():
    """Routing metadata derived from the request's ``parent`` is attached."""
    client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = firestore_admin.CreateIndexRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.create_index), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.create_index(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_create_index_field_headers_async():
    """The async client also attaches routing metadata from ``parent``."""
    client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = firestore_admin.CreateIndexRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.create_index), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.create_index(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_create_index_flattened():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.create_index), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_index(
+ parent="parent_value", index=gfa_index.Index(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].index == gfa_index.Index(name="name_value")
+
+
+def test_create_index_flattened_error():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_index(
+ firestore_admin.CreateIndexRequest(),
+ parent="parent_value",
+ index=gfa_index.Index(name="name_value"),
+ )
+
+
+@pytest.mark.asyncio
+async def test_create_index_flattened_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.create_index), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_index(
+ parent="parent_value", index=gfa_index.Index(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].index == gfa_index.Index(name="name_value")
+
+
+@pytest.mark.asyncio
+async def test_create_index_flattened_error_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_index(
+ firestore_admin.CreateIndexRequest(),
+ parent="parent_value",
+ index=gfa_index.Index(name="name_value"),
+ )
+
+
+def test_list_indexes(
+ transport: str = "grpc", request_type=firestore_admin.ListIndexesRequest
+):
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.list_indexes), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore_admin.ListIndexesResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ response = client.list_indexes(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.ListIndexesRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListIndexesPager)
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_indexes_from_dict():
+ test_list_indexes(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_list_indexes_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAdminAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore_admin.ListIndexesRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_indexes), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore_admin.ListIndexesResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+
+ response = await client.list_indexes(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListIndexesAsyncPager)
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_indexes_field_headers():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.ListIndexesRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.list_indexes), "__call__") as call:
+ call.return_value = firestore_admin.ListIndexesResponse()
+
+ client.list_indexes(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_indexes_field_headers_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.ListIndexesRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_indexes), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore_admin.ListIndexesResponse()
+ )
+
+ await client.list_indexes(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_indexes_flattened():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.list_indexes), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore_admin.ListIndexesResponse()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_indexes(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+
+def test_list_indexes_flattened_error():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_indexes(
+ firestore_admin.ListIndexesRequest(), parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_indexes_flattened_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_indexes), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore_admin.ListIndexesResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore_admin.ListIndexesResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_indexes(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_indexes_flattened_error_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_indexes(
+ firestore_admin.ListIndexesRequest(), parent="parent_value",
+ )
+
+
+def test_list_indexes_pager():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.list_indexes), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(), index.Index(), index.Index(),],
+ next_page_token="abc",
+ ),
+ firestore_admin.ListIndexesResponse(indexes=[], next_page_token="def",),
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(),], next_page_token="ghi",
+ ),
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(), index.Index(),],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_indexes(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, index.Index) for i in results)
+
+
+def test_list_indexes_pages():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.list_indexes), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(), index.Index(), index.Index(),],
+ next_page_token="abc",
+ ),
+ firestore_admin.ListIndexesResponse(indexes=[], next_page_token="def",),
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(),], next_page_token="ghi",
+ ),
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(), index.Index(),],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_indexes(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_indexes_async_pager():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_indexes),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(), index.Index(), index.Index(),],
+ next_page_token="abc",
+ ),
+ firestore_admin.ListIndexesResponse(indexes=[], next_page_token="def",),
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(),], next_page_token="ghi",
+ ),
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(), index.Index(),],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_indexes(request={},)
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, index.Index) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_indexes_async_pages():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_indexes),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(), index.Index(), index.Index(),],
+ next_page_token="abc",
+ ),
+ firestore_admin.ListIndexesResponse(indexes=[], next_page_token="def",),
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(),], next_page_token="ghi",
+ ),
+ firestore_admin.ListIndexesResponse(
+ indexes=[index.Index(), index.Index(),],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.list_indexes(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_get_index(
+ transport: str = "grpc", request_type=firestore_admin.GetIndexRequest
+):
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.get_index), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = index.Index(
+ name="name_value",
+ query_scope=index.Index.QueryScope.COLLECTION,
+ state=index.Index.State.CREATING,
+ )
+
+ response = client.get_index(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.GetIndexRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, index.Index)
+
+ assert response.name == "name_value"
+
+ assert response.query_scope == index.Index.QueryScope.COLLECTION
+
+ assert response.state == index.Index.State.CREATING
+
+
+def test_get_index_from_dict():
+ test_get_index(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_get_index_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAdminAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore_admin.GetIndexRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.get_index), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ index.Index(
+ name="name_value",
+ query_scope=index.Index.QueryScope.COLLECTION,
+ state=index.Index.State.CREATING,
+ )
+ )
+
+ response = await client.get_index(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, index.Index)
+
+ assert response.name == "name_value"
+
+ assert response.query_scope == index.Index.QueryScope.COLLECTION
+
+ assert response.state == index.Index.State.CREATING
+
+
+def test_get_index_field_headers():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.GetIndexRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.get_index), "__call__") as call:
+ call.return_value = index.Index()
+
+ client.get_index(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_index_field_headers_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.GetIndexRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.get_index), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index())
+
+ await client.get_index(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_get_index_flattened():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.get_index), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = index.Index()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_index(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+def test_get_index_flattened_error():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_index(
+ firestore_admin.GetIndexRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_index_flattened_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.get_index), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = index.Index()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_index(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_index_flattened_error_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_index(
+ firestore_admin.GetIndexRequest(), name="name_value",
+ )
+
+
+def test_delete_index(
+ transport: str = "grpc", request_type=firestore_admin.DeleteIndexRequest
+):
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.delete_index), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ response = client.delete_index(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.DeleteIndexRequest()
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_index_from_dict():
+ test_delete_index(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_delete_index_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAdminAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore_admin.DeleteIndexRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.delete_index), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+ response = await client.delete_index(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_index_field_headers():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.DeleteIndexRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.delete_index), "__call__") as call:
+ call.return_value = None
+
+ client.delete_index(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_index_field_headers_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.DeleteIndexRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.delete_index), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+ await client.delete_index(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_delete_index_flattened():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.delete_index), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_index(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+def test_delete_index_flattened_error():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_index(
+ firestore_admin.DeleteIndexRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_index_flattened_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.delete_index), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_index(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_index_flattened_error_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_index(
+ firestore_admin.DeleteIndexRequest(), name="name_value",
+ )
+
+
+def test_get_field(
+ transport: str = "grpc", request_type=firestore_admin.GetFieldRequest
+):
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.get_field), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = field.Field(name="name_value",)
+
+ response = client.get_field(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.GetFieldRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, field.Field)
+
+ assert response.name == "name_value"
+
+
+def test_get_field_from_dict():
+ test_get_field(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_get_field_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAdminAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore_admin.GetFieldRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.get_field), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ field.Field(name="name_value",)
+ )
+
+ response = await client.get_field(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, field.Field)
+
+ assert response.name == "name_value"
+
+
+def test_get_field_field_headers():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.GetFieldRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.get_field), "__call__") as call:
+ call.return_value = field.Field()
+
+ client.get_field(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_field_field_headers_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.GetFieldRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.get_field), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field())
+
+ await client.get_field(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_get_field_flattened():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.get_field), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = field.Field()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_field(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+def test_get_field_flattened_error():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_field(
+ firestore_admin.GetFieldRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_field_flattened_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.get_field), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = field.Field()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_field(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_field_flattened_error_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_field(
+ firestore_admin.GetFieldRequest(), name="name_value",
+ )
+
+
+def test_update_field(
+ transport: str = "grpc", request_type=firestore_admin.UpdateFieldRequest
+):
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.update_field), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+
+ response = client.update_field(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.UpdateFieldRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_update_field_from_dict():
+ test_update_field(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_update_field_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAdminAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore_admin.UpdateFieldRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.update_field), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+
+ response = await client.update_field(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_update_field_field_headers():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.UpdateFieldRequest()
+ request.field.name = "field.name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.update_field), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ client.update_field(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "field.name=field.name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_update_field_field_headers_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.UpdateFieldRequest()
+ request.field.name = "field.name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.update_field), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+
+ await client.update_field(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "field.name=field.name/value",) in kw["metadata"]
+
+
+def test_update_field_flattened():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.update_field), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_field(field=gfa_field.Field(name="name_value"),)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].field == gfa_field.Field(name="name_value")
+
+
+def test_update_field_flattened_error():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_field(
+ firestore_admin.UpdateFieldRequest(),
+ field=gfa_field.Field(name="name_value"),
+ )
+
+
+@pytest.mark.asyncio
+async def test_update_field_flattened_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.update_field), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_field(field=gfa_field.Field(name="name_value"),)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].field == gfa_field.Field(name="name_value")
+
+
+@pytest.mark.asyncio
+async def test_update_field_flattened_error_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_field(
+ firestore_admin.UpdateFieldRequest(),
+ field=gfa_field.Field(name="name_value"),
+ )
+
+
+def test_list_fields(
+ transport: str = "grpc", request_type=firestore_admin.ListFieldsRequest
+):
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.list_fields), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore_admin.ListFieldsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ response = client.list_fields(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.ListFieldsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListFieldsPager)
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_fields_from_dict():
+ test_list_fields(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_list_fields_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAdminAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore_admin.ListFieldsRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_fields), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore_admin.ListFieldsResponse(next_page_token="next_page_token_value",)
+ )
+
+ response = await client.list_fields(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListFieldsAsyncPager)
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_fields_field_headers():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.ListFieldsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.list_fields), "__call__") as call:
+ call.return_value = firestore_admin.ListFieldsResponse()
+
+ client.list_fields(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_fields_field_headers_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.ListFieldsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_fields), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore_admin.ListFieldsResponse()
+ )
+
+ await client.list_fields(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_fields_flattened():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.list_fields), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore_admin.ListFieldsResponse()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_fields(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+
+def test_list_fields_flattened_error():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_fields(
+ firestore_admin.ListFieldsRequest(), parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_fields_flattened_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_fields), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore_admin.ListFieldsResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore_admin.ListFieldsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_fields(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_fields_flattened_error_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_fields(
+ firestore_admin.ListFieldsRequest(), parent="parent_value",
+ )
+
+
+def test_list_fields_pager():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.list_fields), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore_admin.ListFieldsResponse(
+ fields=[field.Field(), field.Field(), field.Field(),],
+ next_page_token="abc",
+ ),
+ firestore_admin.ListFieldsResponse(fields=[], next_page_token="def",),
+ firestore_admin.ListFieldsResponse(
+ fields=[field.Field(),], next_page_token="ghi",
+ ),
+ firestore_admin.ListFieldsResponse(fields=[field.Field(), field.Field(),],),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_fields(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, field.Field) for i in results)
+
+
+def test_list_fields_pages():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.list_fields), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore_admin.ListFieldsResponse(
+ fields=[field.Field(), field.Field(), field.Field(),],
+ next_page_token="abc",
+ ),
+ firestore_admin.ListFieldsResponse(fields=[], next_page_token="def",),
+ firestore_admin.ListFieldsResponse(
+ fields=[field.Field(),], next_page_token="ghi",
+ ),
+ firestore_admin.ListFieldsResponse(fields=[field.Field(), field.Field(),],),
+ RuntimeError,
+ )
+ pages = list(client.list_fields(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_fields_async_pager():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_fields),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore_admin.ListFieldsResponse(
+ fields=[field.Field(), field.Field(), field.Field(),],
+ next_page_token="abc",
+ ),
+ firestore_admin.ListFieldsResponse(fields=[], next_page_token="def",),
+ firestore_admin.ListFieldsResponse(
+ fields=[field.Field(),], next_page_token="ghi",
+ ),
+ firestore_admin.ListFieldsResponse(fields=[field.Field(), field.Field(),],),
+ RuntimeError,
+ )
+ async_pager = await client.list_fields(request={},)
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, field.Field) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_fields_async_pages():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_fields),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore_admin.ListFieldsResponse(
+ fields=[field.Field(), field.Field(), field.Field(),],
+ next_page_token="abc",
+ ),
+ firestore_admin.ListFieldsResponse(fields=[], next_page_token="def",),
+ firestore_admin.ListFieldsResponse(
+ fields=[field.Field(),], next_page_token="ghi",
+ ),
+ firestore_admin.ListFieldsResponse(fields=[field.Field(), field.Field(),],),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.list_fields(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_export_documents(
+ transport: str = "grpc", request_type=firestore_admin.ExportDocumentsRequest
+):
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.export_documents), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+
+ response = client.export_documents(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.ExportDocumentsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_export_documents_from_dict():
+ test_export_documents(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_export_documents_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAdminAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore_admin.ExportDocumentsRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.export_documents), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+
+ response = await client.export_documents(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_export_documents_field_headers():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.ExportDocumentsRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.export_documents), "__call__"
+ ) as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ client.export_documents(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_export_documents_field_headers_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.ExportDocumentsRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.export_documents), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+
+ await client.export_documents(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_export_documents_flattened():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.export_documents), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.export_documents(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+def test_export_documents_flattened_error():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.export_documents(
+ firestore_admin.ExportDocumentsRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_export_documents_flattened_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.export_documents), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.export_documents(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_export_documents_flattened_error_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.export_documents(
+ firestore_admin.ExportDocumentsRequest(), name="name_value",
+ )
+
+
+def test_import_documents(
+ transport: str = "grpc", request_type=firestore_admin.ImportDocumentsRequest
+):
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.import_documents), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+
+ response = client.import_documents(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.ImportDocumentsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_import_documents_from_dict():
+ test_import_documents(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_import_documents_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAdminAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore_admin.ImportDocumentsRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.import_documents), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+
+ response = await client.import_documents(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_import_documents_field_headers():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.ImportDocumentsRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.import_documents), "__call__"
+ ) as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ client.import_documents(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_import_documents_field_headers_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore_admin.ImportDocumentsRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.import_documents), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+
+ await client.import_documents(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_import_documents_flattened():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.import_documents), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.import_documents(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+def test_import_documents_flattened_error():
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.import_documents(
+ firestore_admin.ImportDocumentsRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_import_documents_flattened_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.import_documents), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.import_documents(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_import_documents_flattened_error_async():
+ client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.import_documents(
+ firestore_admin.ImportDocumentsRequest(), name="name_value",
+ )
+
+
+def test_credentials_transport_error():
+    # A transport instance already carries its own credentials/scopes, so
+    # supplying any of them again at the client level must raise ValueError.
+
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.FirestoreAdminGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = FirestoreAdminClient(
+            credentials=credentials.AnonymousCredentials(), transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.FirestoreAdminGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = FirestoreAdminClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.FirestoreAdminGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = FirestoreAdminClient(
+            client_options={"scopes": ["1", "2"]}, transport=transport,
+        )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.FirestoreAdminGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ client = FirestoreAdminClient(transport=transport)
+ assert client._transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.FirestoreAdminGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.FirestoreAdminGrpcAsyncIOTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.FirestoreAdminGrpcTransport,
+ transports.FirestoreAdminGrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),)
+ assert isinstance(client._transport, transports.FirestoreAdminGrpcTransport,)
+
+
+def test_firestore_admin_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(exceptions.DuplicateCredentialArgs):
+ transport = transports.FirestoreAdminTransport(
+ credentials=credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_firestore_admin_base_transport():
+ # Instantiate the base transport.
+ with mock.patch(
+ "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.FirestoreAdminTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = (
+ "create_index",
+ "list_indexes",
+ "get_index",
+ "delete_index",
+ "get_field",
+ "update_field",
+ "list_fields",
+ "export_documents",
+ "import_documents",
+ )
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+ # Additionally, the LRO client (a property) should
+ # also raise NotImplementedError
+ with pytest.raises(NotImplementedError):
+ transport.operations_client
+
+
+def test_firestore_admin_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ auth, "load_credentials_from_file"
+ ) as load_creds, mock.patch(
+ "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.FirestoreAdminTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ quota_project_id="octopus",
+ )
+
+
+def test_firestore_admin_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.FirestoreAdminTransport()
+ adc.assert_called_once()
+
+
+def test_firestore_admin_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ FirestoreAdminClient()
+ adc.assert_called_once_with(
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ quota_project_id=None,
+ )
+
+
+def test_firestore_admin_transport_auth_adc():
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transports.FirestoreAdminGrpcTransport(
+ host="squid.clam.whelk", quota_project_id="octopus"
+ )
+ adc.assert_called_once_with(
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ quota_project_id="octopus",
+ )
+
+
+def test_firestore_admin_host_no_port():
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="firestore.googleapis.com"
+ ),
+ )
+ assert client._transport._host == "firestore.googleapis.com:443"
+
+
+def test_firestore_admin_host_with_port():
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="firestore.googleapis.com:8000"
+ ),
+ )
+ assert client._transport._host == "firestore.googleapis.com:8000"
+
+
+def test_firestore_admin_grpc_transport_channel():
+ channel = grpc.insecure_channel("http://localhost/")
+
+ # Check that channel is used if provided.
+ transport = transports.FirestoreAdminGrpcTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+
+
+def test_firestore_admin_grpc_asyncio_transport_channel():
+ channel = aio.insecure_channel("http://localhost/")
+
+ # Check that channel is used if provided.
+ transport = transports.FirestoreAdminGrpcAsyncIOTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.FirestoreAdminGrpcTransport,
+ transports.FirestoreAdminGrpcAsyncIOTransport,
+ ],
+)
+def test_firestore_admin_transport_channel_mtls_with_client_cert_source(
+ transport_class,
+):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.FirestoreAdminGrpcTransport,
+ transports.FirestoreAdminGrpcAsyncIOTransport,
+ ],
+)
+def test_firestore_admin_transport_channel_mtls_with_adc(transport_class):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_firestore_admin_grpc_lro_client():
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+ transport = client._transport
+
+    # Ensure that we have an api-core operations client.
+ assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_firestore_admin_grpc_lro_async_client():
+ client = FirestoreAdminAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
+ )
+ transport = client._client._transport
+
+    # Ensure that we have an api-core operations client.
+ assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_field_path():
+ project = "squid"
+ database = "clam"
+ collection = "whelk"
+ field = "octopus"
+
+ expected = "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}".format(
+ project=project, database=database, collection=collection, field=field,
+ )
+ actual = FirestoreAdminClient.field_path(project, database, collection, field)
+ assert expected == actual
+
+
+def test_parse_field_path():
+ expected = {
+ "project": "oyster",
+ "database": "nudibranch",
+ "collection": "cuttlefish",
+ "field": "mussel",
+ }
+ path = FirestoreAdminClient.field_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = FirestoreAdminClient.parse_field_path(path)
+ assert expected == actual
+
+
+def test_index_path():
+ project = "squid"
+ database = "clam"
+ collection = "whelk"
+ index = "octopus"
+
+ expected = "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}".format(
+ project=project, database=database, collection=collection, index=index,
+ )
+ actual = FirestoreAdminClient.index_path(project, database, collection, index)
+ assert expected == actual
+
+
+def test_parse_index_path():
+ expected = {
+ "project": "oyster",
+ "database": "nudibranch",
+ "collection": "cuttlefish",
+ "index": "mussel",
+ }
+ path = FirestoreAdminClient.index_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = FirestoreAdminClient.parse_index_path(path)
+ assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.FirestoreAdminTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.FirestoreAdminTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = FirestoreAdminClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/firestore_v1/__init__.py b/tests/unit/gapic/firestore_v1/__init__.py
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/tests/unit/gapic/firestore_v1/__init__.py
@@ -0,0 +1 @@
+
diff --git a/tests/unit/gapic/firestore_v1/test_firestore.py b/tests/unit/gapic/firestore_v1/test_firestore.py
new file mode 100644
index 0000000000..7b20d5a370
--- /dev/null
+++ b/tests/unit/gapic/firestore_v1/test_firestore.py
@@ -0,0 +1,3296 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import mock
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+from google import auth
+from google.api_core import client_options
+from google.api_core import exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.auth import credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.firestore_v1.services.firestore import FirestoreAsyncClient
+from google.cloud.firestore_v1.services.firestore import FirestoreClient
+from google.cloud.firestore_v1.services.firestore import pagers
+from google.cloud.firestore_v1.services.firestore import transports
+from google.cloud.firestore_v1.types import common
+from google.cloud.firestore_v1.types import document
+from google.cloud.firestore_v1.types import document as gf_document
+from google.cloud.firestore_v1.types import firestore
+from google.cloud.firestore_v1.types import query
+from google.cloud.firestore_v1.types import write
+from google.cloud.firestore_v1.types import write as gf_write
+from google.oauth2 import service_account
+from google.protobuf import struct_pb2 as struct # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+from google.protobuf import wrappers_pb2 as wrappers # type: ignore
+from google.rpc import status_pb2 as status # type: ignore
+from google.type import latlng_pb2 as latlng # type: ignore
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert FirestoreClient._get_default_mtls_endpoint(None) is None
+ assert FirestoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+ assert (
+ FirestoreClient._get_default_mtls_endpoint(api_mtls_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ FirestoreClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ FirestoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert FirestoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class", [FirestoreClient, FirestoreAsyncClient])
+def test_firestore_client_from_service_account_file(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client._transport._credentials == creds
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client._transport._credentials == creds
+
+ assert client._transport._host == "firestore.googleapis.com:443"
+
+
+def test_firestore_client_get_transport_class():
+ transport = FirestoreClient.get_transport_class()
+ assert transport == transports.FirestoreGrpcTransport
+
+ transport = FirestoreClient.get_transport_class("grpc")
+ assert transport == transports.FirestoreGrpcTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"),
+ (
+ FirestoreAsyncClient,
+ transports.FirestoreGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+@mock.patch.object(
+ FirestoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreClient)
+)
+@mock.patch.object(
+ FirestoreAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(FirestoreAsyncClient),
+)
+def test_firestore_client_client_options(client_class, transport_class, transport_name):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(FirestoreClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(FirestoreClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", "true"),
+ (
+ FirestoreAsyncClient,
+ transports.FirestoreGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", "false"),
+ (
+ FirestoreAsyncClient,
+ transports.FirestoreGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ FirestoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreClient)
+)
+@mock.patch.object(
+ FirestoreAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(FirestoreAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_firestore_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=client_cert_source_callback
+ )
+ with mock.patch.object(transport_class, "__init__") as patched:
+ ssl_channel_creds = mock.Mock()
+ with mock.patch(
+ "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
+ ):
+ patched.return_value = None
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_ssl_channel_creds = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_ssl_channel_creds = ssl_channel_creds
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ ):
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+ new_callable=mock.PropertyMock,
+ ) as ssl_credentials_mock:
+ if use_client_cert_env == "false":
+ is_mtls_mock.return_value = False
+ ssl_credentials_mock.return_value = None
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_ssl_channel_creds = None
+ else:
+ is_mtls_mock.return_value = True
+ ssl_credentials_mock.return_value = mock.Mock()
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_ssl_channel_creds = (
+ ssl_credentials_mock.return_value
+ )
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ ):
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ is_mtls_mock.return_value = False
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"),
+ (
+ FirestoreAsyncClient,
+ transports.FirestoreGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_firestore_client_client_options_scopes(
+ client_class, transport_class, transport_name
+):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(scopes=["1", "2"],)
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"),
+ (
+ FirestoreAsyncClient,
+ transports.FirestoreGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_firestore_client_client_options_credentials_file(
+ client_class, transport_class, transport_name
+):
+ # Check the case credentials file is provided.
+ options = client_options.ClientOptions(credentials_file="credentials.json")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_firestore_client_client_options_from_dict():
+ with mock.patch(
+ "google.cloud.firestore_v1.services.firestore.transports.FirestoreGrpcTransport.__init__"
+ ) as grpc_transport:
+ grpc_transport.return_value = None
+ client = FirestoreClient(client_options={"api_endpoint": "squid.clam.whelk"})
+ grpc_transport.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_get_document(
+ transport: str = "grpc", request_type=firestore.GetDocumentRequest
+):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.get_document), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = document.Document(name="name_value",)
+
+ response = client.get_document(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.GetDocumentRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, document.Document)
+
+ assert response.name == "name_value"
+
+
+def test_get_document_from_dict():
+ test_get_document(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_get_document_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore.GetDocumentRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.get_document), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ document.Document(name="name_value",)
+ )
+
+ response = await client.get_document(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, document.Document)
+
+ assert response.name == "name_value"
+
+
+def test_get_document_field_headers():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.GetDocumentRequest()
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.get_document), "__call__") as call:
+        call.return_value = document.Document()
+
+        client.get_document(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_document_field_headers_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.GetDocumentRequest()
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.get_document), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document())
+
+        await client.get_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_list_documents(
+    transport: str = "grpc", request_type=firestore.ListDocumentsRequest
+):
+    client = FirestoreClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_documents), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = firestore.ListDocumentsResponse(
+            next_page_token="next_page_token_value",
+        )
+
+        response = client.list_documents(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == firestore.ListDocumentsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDocumentsPager)
+
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_documents_from_dict():
+    test_list_documents(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_list_documents_async(transport: str = "grpc_asyncio"):
+    client = FirestoreAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = firestore.ListDocumentsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_documents), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            firestore.ListDocumentsResponse(next_page_token="next_page_token_value",)
+        )
+
+        response = await client.list_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDocumentsAsyncPager)
+
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_documents_field_headers():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.ListDocumentsRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_documents), "__call__") as call:
+        call.return_value = firestore.ListDocumentsResponse()
+
+        client.list_documents(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_documents_field_headers_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.ListDocumentsRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_documents), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            firestore.ListDocumentsResponse()
+        )
+
+        await client.list_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_documents_pager():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_documents), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            firestore.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                    document.Document(),
+                    document.Document(),
+                ],
+                next_page_token="abc",
+            ),
+            firestore.ListDocumentsResponse(documents=[], next_page_token="def",),
+            firestore.ListDocumentsResponse(
+                documents=[document.Document(),], next_page_token="ghi",
+            ),
+            firestore.ListDocumentsResponse(
+                documents=[document.Document(), document.Document(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_documents(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, document.Document) for i in results)
+
+
+def test_list_documents_pages():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_documents), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            firestore.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                    document.Document(),
+                    document.Document(),
+                ],
+                next_page_token="abc",
+            ),
+            firestore.ListDocumentsResponse(documents=[], next_page_token="def",),
+            firestore.ListDocumentsResponse(
+                documents=[document.Document(),], next_page_token="ghi",
+            ),
+            firestore.ListDocumentsResponse(
+                documents=[document.Document(), document.Document(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_documents(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_documents_async_pager():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_documents),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            firestore.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                    document.Document(),
+                    document.Document(),
+                ],
+                next_page_token="abc",
+            ),
+            firestore.ListDocumentsResponse(documents=[], next_page_token="def",),
+            firestore.ListDocumentsResponse(
+                documents=[document.Document(),], next_page_token="ghi",
+            ),
+            firestore.ListDocumentsResponse(
+                documents=[document.Document(), document.Document(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_documents(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, document.Document) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_documents_async_pages():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_documents),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            firestore.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                    document.Document(),
+                    document.Document(),
+                ],
+                next_page_token="abc",
+            ),
+            firestore.ListDocumentsResponse(documents=[], next_page_token="def",),
+            firestore.ListDocumentsResponse(
+                documents=[document.Document(),], next_page_token="ghi",
+            ),
+            firestore.ListDocumentsResponse(
+                documents=[document.Document(), document.Document(),],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_documents(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_update_document(
+    transport: str = "grpc", request_type=firestore.UpdateDocumentRequest
+):
+    client = FirestoreClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.update_document), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gf_document.Document(name="name_value",)
+
+        response = client.update_document(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == firestore.UpdateDocumentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gf_document.Document)
+
+    assert response.name == "name_value"
+
+
+def test_update_document_from_dict():
+    test_update_document(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_update_document_async(transport: str = "grpc_asyncio"):
+    client = FirestoreAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = firestore.UpdateDocumentRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.update_document), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            gf_document.Document(name="name_value",)
+        )
+
+        response = await client.update_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gf_document.Document)
+
+    assert response.name == "name_value"
+
+
+def test_update_document_field_headers():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.UpdateDocumentRequest()
+    request.document.name = "document.name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.update_document), "__call__") as call:
+        call.return_value = gf_document.Document()
+
+        client.update_document(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "document.name=document.name/value",) in kw[
+        "metadata"
+    ]
+
+
+@pytest.mark.asyncio
+async def test_update_document_field_headers_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.UpdateDocumentRequest()
+    request.document.name = "document.name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.update_document), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            gf_document.Document()
+        )
+
+        await client.update_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "document.name=document.name/value",) in kw[
+        "metadata"
+    ]
+
+
+def test_update_document_flattened():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.update_document), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gf_document.Document()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_document(
+            document=gf_document.Document(name="name_value"),
+            update_mask=common.DocumentMask(field_paths=["field_paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == gf_document.Document(name="name_value")
+
+        assert args[0].update_mask == common.DocumentMask(
+            field_paths=["field_paths_value"]
+        )
+
+
+def test_update_document_flattened_error():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_document(
+            firestore.UpdateDocumentRequest(),
+            document=gf_document.Document(name="name_value"),
+            update_mask=common.DocumentMask(field_paths=["field_paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_document_flattened_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.update_document), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call, wrapped in a
+        # fake UnaryUnaryCall so the async client can await it.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            gf_document.Document()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_document(
+            document=gf_document.Document(name="name_value"),
+            update_mask=common.DocumentMask(field_paths=["field_paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == gf_document.Document(name="name_value")
+
+        assert args[0].update_mask == common.DocumentMask(
+            field_paths=["field_paths_value"]
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_document_flattened_error_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_document(
+            firestore.UpdateDocumentRequest(),
+            document=gf_document.Document(name="name_value"),
+            update_mask=common.DocumentMask(field_paths=["field_paths_value"]),
+        )
+
+
+def test_delete_document(
+    transport: str = "grpc", request_type=firestore.DeleteDocumentRequest
+):
+    client = FirestoreClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.delete_document), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        response = client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == firestore.DeleteDocumentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_document_from_dict():
+    test_delete_document(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_delete_document_async(transport: str = "grpc_asyncio"):
+    client = FirestoreAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = firestore.DeleteDocumentRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.delete_document), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+        response = await client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_document_field_headers():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.DeleteDocumentRequest()
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.delete_document), "__call__") as call:
+        call.return_value = None
+
+        client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_document_field_headers_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.DeleteDocumentRequest()
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.delete_document), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+        await client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_delete_document_flattened():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.delete_document), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_document(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].name == "name_value"
+
+
+def test_delete_document_flattened_error():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_document(
+            firestore.DeleteDocumentRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_document_flattened_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.delete_document), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call, wrapped in a
+        # fake UnaryUnaryCall so the async client can await it.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_document(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_document_flattened_error_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_document(
+            firestore.DeleteDocumentRequest(), name="name_value",
+        )
+
+
+def test_batch_get_documents(
+    transport: str = "grpc", request_type=firestore.BatchGetDocumentsRequest
+):
+    client = FirestoreClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.batch_get_documents), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the (server-streaming) call.
+        call.return_value = iter([firestore.BatchGetDocumentsResponse()])
+
+        response = client.batch_get_documents(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == firestore.BatchGetDocumentsRequest()
+
+    # Establish that the response is the type that we expect.
+    for message in response:
+        assert isinstance(message, firestore.BatchGetDocumentsResponse)
+
+
+def test_batch_get_documents_from_dict():
+    test_batch_get_documents(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_batch_get_documents_async(transport: str = "grpc_asyncio"):
+    client = FirestoreAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = firestore.BatchGetDocumentsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.batch_get_documents), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the (server-streaming) call.
+        call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
+        call.return_value.read = mock.AsyncMock(
+            side_effect=[firestore.BatchGetDocumentsResponse()]
+        )
+
+        response = await client.batch_get_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    message = await response.read()
+    assert isinstance(message, firestore.BatchGetDocumentsResponse)
+
+
+def test_batch_get_documents_field_headers():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.BatchGetDocumentsRequest()
+    request.database = "database/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.batch_get_documents), "__call__"
+    ) as call:
+        call.return_value = iter([firestore.BatchGetDocumentsResponse()])
+
+        client.batch_get_documents(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "database=database/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_batch_get_documents_field_headers_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.BatchGetDocumentsRequest()
+    request.database = "database/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.batch_get_documents), "__call__"
+    ) as call:
+        call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
+        call.return_value.read = mock.AsyncMock(
+            side_effect=[firestore.BatchGetDocumentsResponse()]
+        )
+
+        await client.batch_get_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "database=database/value",) in kw["metadata"]
+
+
+def test_begin_transaction(
+    transport: str = "grpc", request_type=firestore.BeginTransactionRequest
+):
+    client = FirestoreClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.begin_transaction), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = firestore.BeginTransactionResponse(
+            transaction=b"transaction_blob",
+        )
+
+        response = client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == firestore.BeginTransactionRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, firestore.BeginTransactionResponse)
+
+    assert response.transaction == b"transaction_blob"
+
+
+def test_begin_transaction_from_dict():
+    test_begin_transaction(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_begin_transaction_async(transport: str = "grpc_asyncio"):
+    client = FirestoreAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = firestore.BeginTransactionRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.begin_transaction), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            firestore.BeginTransactionResponse(transaction=b"transaction_blob",)
+        )
+
+        response = await client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, firestore.BeginTransactionResponse)
+
+    assert response.transaction == b"transaction_blob"
+
+
+def test_begin_transaction_field_headers():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.BeginTransactionRequest()
+    request.database = "database/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.begin_transaction), "__call__"
+    ) as call:
+        call.return_value = firestore.BeginTransactionResponse()
+
+        client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called exactly once.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "database=database/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_begin_transaction_field_headers_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = firestore.BeginTransactionRequest()
+    request.database = "database/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.begin_transaction), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            firestore.BeginTransactionResponse()
+        )
+
+        await client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the "x-goog-request-params" routing header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "database=database/value",) in kw["metadata"]
+
+
+def test_begin_transaction_flattened():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.begin_transaction), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = firestore.BeginTransactionResponse()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.begin_transaction(database="database_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].database == "database_value"
+
+
+def test_begin_transaction_flattened_error():
+    client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.begin_transaction(
+            firestore.BeginTransactionRequest(), database="database_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_begin_transaction_flattened_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.begin_transaction), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call, wrapped in a
+        # fake UnaryUnaryCall so the async client can await it.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            firestore.BeginTransactionResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.begin_transaction(database="database_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].database == "database_value"
+
+
+@pytest.mark.asyncio
+async def test_begin_transaction_flattened_error_async():
+    client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.begin_transaction(
+            firestore.BeginTransactionRequest(), database="database_value",
+        )
+
+
+def test_commit(transport: str = "grpc", request_type=firestore.CommitRequest):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.commit), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore.CommitResponse()
+
+ response = client.commit(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.CommitRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, firestore.CommitResponse)
+
+
+def test_commit_from_dict():
+ test_commit(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_commit_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore.CommitRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._client._transport.commit), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore.CommitResponse()
+ )
+
+ response = await client.commit(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, firestore.CommitResponse)
+
+
+def test_commit_field_headers():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.CommitRequest()
+ request.database = "database/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.commit), "__call__") as call:
+ call.return_value = firestore.CommitResponse()
+
+ client.commit(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "database=database/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_commit_field_headers_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.CommitRequest()
+ request.database = "database/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._client._transport.commit), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore.CommitResponse()
+ )
+
+ await client.commit(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "database=database/value",) in kw["metadata"]
+
+
+def test_commit_flattened():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.commit), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore.CommitResponse()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.commit(
+ database="database_value",
+ writes=[gf_write.Write(update=gf_document.Document(name="name_value"))],
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].database == "database_value"
+
+ assert args[0].writes == [
+ gf_write.Write(update=gf_document.Document(name="name_value"))
+ ]
+
+
+def test_commit_flattened_error():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.commit(
+ firestore.CommitRequest(),
+ database="database_value",
+ writes=[gf_write.Write(update=gf_document.Document(name="name_value"))],
+ )
+
+
+@pytest.mark.asyncio
+async def test_commit_flattened_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._client._transport.commit), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore.CommitResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore.CommitResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.commit(
+ database="database_value",
+ writes=[gf_write.Write(update=gf_document.Document(name="name_value"))],
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].database == "database_value"
+
+ assert args[0].writes == [
+ gf_write.Write(update=gf_document.Document(name="name_value"))
+ ]
+
+
+@pytest.mark.asyncio
+async def test_commit_flattened_error_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.commit(
+ firestore.CommitRequest(),
+ database="database_value",
+ writes=[gf_write.Write(update=gf_document.Document(name="name_value"))],
+ )
+
+
+def test_rollback(transport: str = "grpc", request_type=firestore.RollbackRequest):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.rollback), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ response = client.rollback(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.RollbackRequest()
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_rollback_from_dict():
+ test_rollback(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_rollback_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore.RollbackRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.rollback), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+ response = await client.rollback(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_rollback_field_headers():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.RollbackRequest()
+ request.database = "database/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.rollback), "__call__") as call:
+ call.return_value = None
+
+ client.rollback(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "database=database/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_rollback_field_headers_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.RollbackRequest()
+ request.database = "database/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.rollback), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+ await client.rollback(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "database=database/value",) in kw["metadata"]
+
+
+def test_rollback_flattened():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.rollback), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.rollback(
+ database="database_value", transaction=b"transaction_blob",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].database == "database_value"
+
+ assert args[0].transaction == b"transaction_blob"
+
+
+def test_rollback_flattened_error():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.rollback(
+ firestore.RollbackRequest(),
+ database="database_value",
+ transaction=b"transaction_blob",
+ )
+
+
+@pytest.mark.asyncio
+async def test_rollback_flattened_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.rollback), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.rollback(
+ database="database_value", transaction=b"transaction_blob",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].database == "database_value"
+
+ assert args[0].transaction == b"transaction_blob"
+
+
+@pytest.mark.asyncio
+async def test_rollback_flattened_error_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.rollback(
+ firestore.RollbackRequest(),
+ database="database_value",
+ transaction=b"transaction_blob",
+ )
+
+
+def test_run_query(transport: str = "grpc", request_type=firestore.RunQueryRequest):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.run_query), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = iter([firestore.RunQueryResponse()])
+
+ response = client.run_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.RunQueryRequest()
+
+ # Establish that the response is the type that we expect.
+ for message in response:
+ assert isinstance(message, firestore.RunQueryResponse)
+
+
+def test_run_query_from_dict():
+ test_run_query(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_run_query_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore.RunQueryRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.run_query), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
+ call.return_value.read = mock.AsyncMock(
+ side_effect=[firestore.RunQueryResponse()]
+ )
+
+ response = await client.run_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ message = await response.read()
+ assert isinstance(message, firestore.RunQueryResponse)
+
+
+def test_run_query_field_headers():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.RunQueryRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.run_query), "__call__") as call:
+ call.return_value = iter([firestore.RunQueryResponse()])
+
+ client.run_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_run_query_field_headers_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.RunQueryRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.run_query), "__call__"
+ ) as call:
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
+ call.return_value.read = mock.AsyncMock(
+ side_effect=[firestore.RunQueryResponse()]
+ )
+
+ await client.run_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_partition_query(
+ transport: str = "grpc", request_type=firestore.PartitionQueryRequest
+):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.partition_query), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore.PartitionQueryResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ response = client.partition_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.PartitionQueryRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.PartitionQueryPager)
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_partition_query_from_dict():
+ test_partition_query(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_partition_query_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore.PartitionQueryRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.partition_query), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore.PartitionQueryResponse(next_page_token="next_page_token_value",)
+ )
+
+ response = await client.partition_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.PartitionQueryAsyncPager)
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_partition_query_field_headers():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.PartitionQueryRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.partition_query), "__call__") as call:
+ call.return_value = firestore.PartitionQueryResponse()
+
+ client.partition_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_partition_query_field_headers_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.PartitionQueryRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.partition_query), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore.PartitionQueryResponse()
+ )
+
+ await client.partition_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_partition_query_pager():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.partition_query), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(), query.Cursor(), query.Cursor(),],
+ next_page_token="abc",
+ ),
+ firestore.PartitionQueryResponse(partitions=[], next_page_token="def",),
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(),], next_page_token="ghi",
+ ),
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(), query.Cursor(),],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.partition_query(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, query.Cursor) for i in results)
+
+
+def test_partition_query_pages():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.partition_query), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(), query.Cursor(), query.Cursor(),],
+ next_page_token="abc",
+ ),
+ firestore.PartitionQueryResponse(partitions=[], next_page_token="def",),
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(),], next_page_token="ghi",
+ ),
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(), query.Cursor(),],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.partition_query(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_partition_query_async_pager():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.partition_query),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(), query.Cursor(), query.Cursor(),],
+ next_page_token="abc",
+ ),
+ firestore.PartitionQueryResponse(partitions=[], next_page_token="def",),
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(),], next_page_token="ghi",
+ ),
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(), query.Cursor(),],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.partition_query(request={},)
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, query.Cursor) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_partition_query_async_pages():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.partition_query),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(), query.Cursor(), query.Cursor(),],
+ next_page_token="abc",
+ ),
+ firestore.PartitionQueryResponse(partitions=[], next_page_token="def",),
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(),], next_page_token="ghi",
+ ),
+ firestore.PartitionQueryResponse(
+ partitions=[query.Cursor(), query.Cursor(),],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.partition_query(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_write(transport: str = "grpc", request_type=firestore.WriteRequest):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ requests = [request]
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.write), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = iter([firestore.WriteResponse()])
+
+ response = client.write(iter(requests))
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert next(args[0]) == request
+
+ # Establish that the response is the type that we expect.
+ for message in response:
+ assert isinstance(message, firestore.WriteResponse)
+
+
+def test_write_from_dict():
+ test_write(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_write_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore.WriteRequest()
+
+ requests = [request]
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._client._transport.write), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True)
+ call.return_value.read = mock.AsyncMock(side_effect=[firestore.WriteResponse()])
+
+ response = await client.write(iter(requests))
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert next(args[0]) == request
+
+ # Establish that the response is the type that we expect.
+ message = await response.read()
+ assert isinstance(message, firestore.WriteResponse)
+
+
+def test_listen(transport: str = "grpc", request_type=firestore.ListenRequest):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ requests = [request]
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.listen), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = iter([firestore.ListenResponse()])
+
+ response = client.listen(iter(requests))
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert next(args[0]) == request
+
+ # Establish that the response is the type that we expect.
+ for message in response:
+ assert isinstance(message, firestore.ListenResponse)
+
+
+def test_listen_from_dict():
+ test_listen(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_listen_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore.ListenRequest()
+
+ requests = [request]
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._client._transport.listen), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True)
+ call.return_value.read = mock.AsyncMock(
+ side_effect=[firestore.ListenResponse()]
+ )
+
+ response = await client.listen(iter(requests))
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert next(args[0]) == request
+
+ # Establish that the response is the type that we expect.
+ message = await response.read()
+ assert isinstance(message, firestore.ListenResponse)
+
+
+def test_list_collection_ids(
+ transport: str = "grpc", request_type=firestore.ListCollectionIdsRequest
+):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.list_collection_ids), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore.ListCollectionIdsResponse(
+ collection_ids=["collection_ids_value"],
+ next_page_token="next_page_token_value",
+ )
+
+ response = client.list_collection_ids(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.ListCollectionIdsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListCollectionIdsPager)
+
+ assert response.collection_ids == ["collection_ids_value"]
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_collection_ids_from_dict():
+ test_list_collection_ids(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_list_collection_ids_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore.ListCollectionIdsRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_collection_ids), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore.ListCollectionIdsResponse(
+ collection_ids=["collection_ids_value"],
+ next_page_token="next_page_token_value",
+ )
+ )
+
+ response = await client.list_collection_ids(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListCollectionIdsAsyncPager)
+
+ assert response.collection_ids == ["collection_ids_value"]
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_collection_ids_field_headers():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.ListCollectionIdsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.list_collection_ids), "__call__"
+ ) as call:
+ call.return_value = firestore.ListCollectionIdsResponse()
+
+ client.list_collection_ids(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_collection_ids_field_headers_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.ListCollectionIdsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_collection_ids), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore.ListCollectionIdsResponse()
+ )
+
+ await client.list_collection_ids(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_collection_ids_flattened():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.list_collection_ids), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore.ListCollectionIdsResponse()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_collection_ids(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+
+def test_list_collection_ids_flattened_error():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_collection_ids(
+ firestore.ListCollectionIdsRequest(), parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_collection_ids_flattened_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_collection_ids), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore.ListCollectionIdsResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore.ListCollectionIdsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_collection_ids(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_collection_ids_flattened_error_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_collection_ids(
+ firestore.ListCollectionIdsRequest(), parent="parent_value",
+ )
+
+
+def test_list_collection_ids_pager():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.list_collection_ids), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[str(), str(), str(),], next_page_token="abc",
+ ),
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[], next_page_token="def",
+ ),
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[str(),], next_page_token="ghi",
+ ),
+ firestore.ListCollectionIdsResponse(collection_ids=[str(), str(),],),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_collection_ids(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, str) for i in results)
+
+
+def test_list_collection_ids_pages():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.list_collection_ids), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[str(), str(), str(),], next_page_token="abc",
+ ),
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[], next_page_token="def",
+ ),
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[str(),], next_page_token="ghi",
+ ),
+ firestore.ListCollectionIdsResponse(collection_ids=[str(), str(),],),
+ RuntimeError,
+ )
+ pages = list(client.list_collection_ids(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_collection_ids_async_pager():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_collection_ids),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[str(), str(), str(),], next_page_token="abc",
+ ),
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[], next_page_token="def",
+ ),
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[str(),], next_page_token="ghi",
+ ),
+ firestore.ListCollectionIdsResponse(collection_ids=[str(), str(),],),
+ RuntimeError,
+ )
+ async_pager = await client.list_collection_ids(request={},)
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, str) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_collection_ids_async_pages():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.list_collection_ids),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[str(), str(), str(),], next_page_token="abc",
+ ),
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[], next_page_token="def",
+ ),
+ firestore.ListCollectionIdsResponse(
+ collection_ids=[str(),], next_page_token="ghi",
+ ),
+ firestore.ListCollectionIdsResponse(collection_ids=[str(), str(),],),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.list_collection_ids(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_batch_write(transport: str = "grpc", request_type=firestore.BatchWriteRequest):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.batch_write), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = firestore.BatchWriteResponse()
+
+ response = client.batch_write(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.BatchWriteRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, firestore.BatchWriteResponse)
+
+
+def test_batch_write_from_dict():
+ test_batch_write(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_batch_write_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore.BatchWriteRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.batch_write), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore.BatchWriteResponse()
+ )
+
+ response = await client.batch_write(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, firestore.BatchWriteResponse)
+
+
+def test_batch_write_field_headers():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.BatchWriteRequest()
+ request.database = "database/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.batch_write), "__call__") as call:
+ call.return_value = firestore.BatchWriteResponse()
+
+ client.batch_write(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "database=database/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_batch_write_field_headers_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.BatchWriteRequest()
+ request.database = "database/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.batch_write), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ firestore.BatchWriteResponse()
+ )
+
+ await client.batch_write(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "database=database/value",) in kw["metadata"]
+
+
+def test_create_document(
+ transport: str = "grpc", request_type=firestore.CreateDocumentRequest
+):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.create_document), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = document.Document(name="name_value",)
+
+ response = client.create_document(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.CreateDocumentRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, document.Document)
+
+ assert response.name == "name_value"
+
+
+def test_create_document_from_dict():
+ test_create_document(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_create_document_async(transport: str = "grpc_asyncio"):
+ client = FirestoreAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = firestore.CreateDocumentRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.create_document), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ document.Document(name="name_value",)
+ )
+
+ response = await client.create_document(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, document.Document)
+
+ assert response.name == "name_value"
+
+
+def test_create_document_field_headers():
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.CreateDocumentRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.create_document), "__call__") as call:
+ call.return_value = document.Document()
+
+ client.create_document(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_document_field_headers_async():
+ client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = firestore.CreateDocumentRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.create_document), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document())
+
+ await client.create_document(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.FirestoreGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.FirestoreGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = FirestoreClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.FirestoreGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = FirestoreClient(
+ client_options={"scopes": ["1", "2"]}, transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.FirestoreGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ client = FirestoreClient(transport=transport)
+ assert client._transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.FirestoreGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.FirestoreGrpcAsyncIOTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = FirestoreClient(credentials=credentials.AnonymousCredentials(),)
+ assert isinstance(client._transport, transports.FirestoreGrpcTransport,)
+
+
+def test_firestore_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(exceptions.DuplicateCredentialArgs):
+ transport = transports.FirestoreTransport(
+ credentials=credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_firestore_base_transport():
+ # Instantiate the base transport.
+ with mock.patch(
+ "google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.FirestoreTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = (
+ "get_document",
+ "list_documents",
+ "update_document",
+ "delete_document",
+ "batch_get_documents",
+ "begin_transaction",
+ "commit",
+ "rollback",
+ "run_query",
+ "partition_query",
+ "write",
+ "listen",
+ "list_collection_ids",
+ "batch_write",
+ "create_document",
+ )
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+
+def test_firestore_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ auth, "load_credentials_from_file"
+ ) as load_creds, mock.patch(
+ "google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.FirestoreTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ quota_project_id="octopus",
+ )
+
+
+def test_firestore_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.FirestoreTransport()
+ adc.assert_called_once()
+
+
+def test_firestore_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ FirestoreClient()
+ adc.assert_called_once_with(
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ quota_project_id=None,
+ )
+
+
+def test_firestore_transport_auth_adc():
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transports.FirestoreGrpcTransport(
+ host="squid.clam.whelk", quota_project_id="octopus"
+ )
+ adc.assert_called_once_with(
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ quota_project_id="octopus",
+ )
+
+
+def test_firestore_host_no_port():
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="firestore.googleapis.com"
+ ),
+ )
+ assert client._transport._host == "firestore.googleapis.com:443"
+
+
+def test_firestore_host_with_port():
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="firestore.googleapis.com:8000"
+ ),
+ )
+ assert client._transport._host == "firestore.googleapis.com:8000"
+
+
+def test_firestore_grpc_transport_channel():
+ channel = grpc.insecure_channel("http://localhost/")
+
+ # Check that channel is used if provided.
+ transport = transports.FirestoreGrpcTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+
+
+def test_firestore_grpc_asyncio_transport_channel():
+ channel = aio.insecure_channel("http://localhost/")
+
+ # Check that channel is used if provided.
+ transport = transports.FirestoreGrpcAsyncIOTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport],
+)
+def test_firestore_transport_channel_mtls_with_client_cert_source(transport_class):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport],
+)
+def test_firestore_transport_channel_mtls_with_adc(transport_class):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.FirestoreTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.FirestoreTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = FirestoreClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/google/cloud/firestore_admin_v1/gapic/transports/__init__.py b/tests/unit/gapic/v1/__init__.py
similarity index 100%
rename from google/cloud/firestore_admin_v1/gapic/transports/__init__.py
rename to tests/unit/gapic/v1/__init__.py
diff --git a/tests/unit/gapic/v1/test_firestore_admin_client_v1.py b/tests/unit/gapic/v1/test_firestore_admin_client_v1.py
deleted file mode 100644
index 9a731130d2..0000000000
--- a/tests/unit/gapic/v1/test_firestore_admin_client_v1.py
+++ /dev/null
@@ -1,430 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Unit tests."""
-
-import mock
-import pytest
-
-from google.cloud import firestore_admin_v1
-from google.cloud.firestore_admin_v1.proto import field_pb2
-from google.cloud.firestore_admin_v1.proto import firestore_admin_pb2
-from google.cloud.firestore_admin_v1.proto import index_pb2
-from google.longrunning import operations_pb2
-from google.protobuf import empty_pb2
-
-
-class MultiCallableStub(object):
- """Stub for the grpc.UnaryUnaryMultiCallable interface."""
-
- def __init__(self, method, channel_stub):
- self.method = method
- self.channel_stub = channel_stub
-
- def __call__(self, request, timeout=None, metadata=None, credentials=None):
- self.channel_stub.requests.append((self.method, request))
-
- response = None
- if self.channel_stub.responses:
- response = self.channel_stub.responses.pop()
-
- if isinstance(response, Exception):
- raise response
-
- if response:
- return response
-
-
-class ChannelStub(object):
- """Stub for the grpc.Channel interface."""
-
- def __init__(self, responses=[]):
- self.responses = responses
- self.requests = []
-
- def unary_unary(self, method, request_serializer=None, response_deserializer=None):
- return MultiCallableStub(method, self)
-
-
-class CustomException(Exception):
- pass
-
-
-class TestFirestoreAdminClient(object):
- def test_create_index(self):
- # Setup Expected Response
- name = "name3373707"
- done = True
- expected_response = {"name": name, "done": done}
- expected_response = operations_pb2.Operation(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup Request
- parent = client.parent_path("[PROJECT]", "[DATABASE]", "[COLLECTION_ID]")
- index = {}
-
- response = client.create_index(parent, index)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_admin_pb2.CreateIndexRequest(
- parent=parent, index=index
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_create_index_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup request
- parent = client.parent_path("[PROJECT]", "[DATABASE]", "[COLLECTION_ID]")
- index = {}
-
- with pytest.raises(CustomException):
- client.create_index(parent, index)
-
- def test_list_indexes(self):
- # Setup Expected Response
- next_page_token = ""
- indexes_element = {}
- indexes = [indexes_element]
- expected_response = {"next_page_token": next_page_token, "indexes": indexes}
- expected_response = firestore_admin_pb2.ListIndexesResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup Request
- parent = client.parent_path("[PROJECT]", "[DATABASE]", "[COLLECTION_ID]")
-
- paged_list_response = client.list_indexes(parent)
- resources = list(paged_list_response)
- assert len(resources) == 1
-
- assert expected_response.indexes[0] == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = firestore_admin_pb2.ListIndexesRequest(parent=parent)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_list_indexes_exception(self):
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup request
- parent = client.parent_path("[PROJECT]", "[DATABASE]", "[COLLECTION_ID]")
-
- paged_list_response = client.list_indexes(parent)
- with pytest.raises(CustomException):
- list(paged_list_response)
-
- def test_get_index(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- expected_response = {"name": name_2}
- expected_response = index_pb2.Index(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup Request
- name = client.index_path(
- "[PROJECT]", "[DATABASE]", "[COLLECTION_ID]", "[INDEX_ID]"
- )
-
- response = client.get_index(name)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_admin_pb2.GetIndexRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_get_index_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup request
- name = client.index_path(
- "[PROJECT]", "[DATABASE]", "[COLLECTION_ID]", "[INDEX_ID]"
- )
-
- with pytest.raises(CustomException):
- client.get_index(name)
-
- def test_delete_index(self):
- channel = ChannelStub()
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup Request
- name = client.index_path(
- "[PROJECT]", "[DATABASE]", "[COLLECTION_ID]", "[INDEX_ID]"
- )
-
- client.delete_index(name)
-
- assert len(channel.requests) == 1
- expected_request = firestore_admin_pb2.DeleteIndexRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_delete_index_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup request
- name = client.index_path(
- "[PROJECT]", "[DATABASE]", "[COLLECTION_ID]", "[INDEX_ID]"
- )
-
- with pytest.raises(CustomException):
- client.delete_index(name)
-
- def test_import_documents(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- done = True
- expected_response = {"name": name_2, "done": done}
- expected_response = operations_pb2.Operation(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup Request
- name = client.database_path("[PROJECT]", "[DATABASE]")
-
- response = client.import_documents(name)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_admin_pb2.ImportDocumentsRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_import_documents_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup request
- name = client.database_path("[PROJECT]", "[DATABASE]")
-
- with pytest.raises(CustomException):
- client.import_documents(name)
-
- def test_export_documents(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- done = True
- expected_response = {"name": name_2, "done": done}
- expected_response = operations_pb2.Operation(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup Request
- name = client.database_path("[PROJECT]", "[DATABASE]")
-
- response = client.export_documents(name)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_admin_pb2.ExportDocumentsRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_export_documents_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup request
- name = client.database_path("[PROJECT]", "[DATABASE]")
-
- with pytest.raises(CustomException):
- client.export_documents(name)
-
- def test_get_field(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- expected_response = {"name": name_2}
- expected_response = field_pb2.Field(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup Request
- name = client.field_path(
- "[PROJECT]", "[DATABASE]", "[COLLECTION_ID]", "[FIELD_ID]"
- )
-
- response = client.get_field(name)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_admin_pb2.GetFieldRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_get_field_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup request
- name = client.field_path(
- "[PROJECT]", "[DATABASE]", "[COLLECTION_ID]", "[FIELD_ID]"
- )
-
- with pytest.raises(CustomException):
- client.get_field(name)
-
- def test_list_fields(self):
- # Setup Expected Response
- next_page_token = ""
- fields_element = {}
- fields = [fields_element]
- expected_response = {"next_page_token": next_page_token, "fields": fields}
- expected_response = firestore_admin_pb2.ListFieldsResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup Request
- parent = client.parent_path("[PROJECT]", "[DATABASE]", "[COLLECTION_ID]")
-
- paged_list_response = client.list_fields(parent)
- resources = list(paged_list_response)
- assert len(resources) == 1
-
- assert expected_response.fields[0] == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = firestore_admin_pb2.ListFieldsRequest(parent=parent)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_list_fields_exception(self):
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup request
- parent = client.parent_path("[PROJECT]", "[DATABASE]", "[COLLECTION_ID]")
-
- paged_list_response = client.list_fields(parent)
- with pytest.raises(CustomException):
- list(paged_list_response)
-
- def test_update_field(self):
- # Setup Expected Response
- name = "name3373707"
- done = True
- expected_response = {"name": name, "done": done}
- expected_response = operations_pb2.Operation(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup Request
- field = {}
-
- response = client.update_field(field)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_admin_pb2.UpdateFieldRequest(field=field)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_update_field_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_admin_v1.FirestoreAdminClient()
-
- # Setup request
- field = {}
-
- with pytest.raises(CustomException):
- client.update_field(field)
diff --git a/tests/unit/gapic/v1/test_firestore_client_v1.py b/tests/unit/gapic/v1/test_firestore_client_v1.py
deleted file mode 100644
index 8e345da1af..0000000000
--- a/tests/unit/gapic/v1/test_firestore_client_v1.py
+++ /dev/null
@@ -1,646 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Unit tests."""
-
-import mock
-import pytest
-
-from google.cloud.firestore_v1.gapic import firestore_client
-from google.cloud.firestore_v1.proto import common_pb2
-from google.cloud.firestore_v1.proto import document_pb2
-from google.cloud.firestore_v1.proto import firestore_pb2
-from google.protobuf import empty_pb2
-
-
-class MultiCallableStub(object):
- """Stub for the grpc.UnaryUnaryMultiCallable interface."""
-
- def __init__(self, method, channel_stub):
- self.method = method
- self.channel_stub = channel_stub
-
- def __call__(self, request, timeout=None, metadata=None, credentials=None):
- self.channel_stub.requests.append((self.method, request))
-
- response = None
- if self.channel_stub.responses:
- response = self.channel_stub.responses.pop()
-
- if isinstance(response, Exception):
- raise response
-
- if response:
- return response
-
-
-class ChannelStub(object):
- """Stub for the grpc.Channel interface."""
-
- def __init__(self, responses=[]):
- self.responses = responses
- self.requests = []
-
- def unary_unary(self, method, request_serializer=None, response_deserializer=None):
- return MultiCallableStub(method, self)
-
- def unary_stream(self, method, request_serializer=None, response_deserializer=None):
- return MultiCallableStub(method, self)
-
- def stream_stream(
- self, method, request_serializer=None, response_deserializer=None
- ):
- return MultiCallableStub(method, self)
-
-
-class CustomException(Exception):
- pass
-
-
-class TestFirestoreClient(object):
- def test_get_document(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- expected_response = {"name": name_2}
- expected_response = document_pb2.Document(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- name = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- response = client.get_document(name)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.GetDocumentRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_get_document_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- name = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- with pytest.raises(CustomException):
- client.get_document(name)
-
- def test_list_documents(self):
- # Setup Expected Response
- next_page_token = ""
- documents_element = {}
- documents = [documents_element]
- expected_response = {"next_page_token": next_page_token, "documents": documents}
- expected_response = firestore_pb2.ListDocumentsResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
- collection_id = "collectionId-821242276"
-
- paged_list_response = client.list_documents(parent, collection_id)
- resources = list(paged_list_response)
- assert len(resources) == 1
-
- assert expected_response.documents[0] == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.ListDocumentsRequest(
- parent=parent, collection_id=collection_id
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_list_documents_exception(self):
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
- collection_id = "collectionId-821242276"
-
- paged_list_response = client.list_documents(parent, collection_id)
- with pytest.raises(CustomException):
- list(paged_list_response)
-
- def test_create_document(self):
- # Setup Expected Response
- name = "name3373707"
- expected_response = {"name": name}
- expected_response = document_pb2.Document(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
- collection_id = "collectionId-821242276"
- document_id = "documentId506676927"
- document = {}
-
- response = client.create_document(parent, collection_id, document_id, document)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.CreateDocumentRequest(
- parent=parent,
- collection_id=collection_id,
- document_id=document_id,
- document=document,
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_create_document_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
- collection_id = "collectionId-821242276"
- document_id = "documentId506676927"
- document = {}
-
- with pytest.raises(CustomException):
- client.create_document(parent, collection_id, document_id, document)
-
- def test_update_document(self):
- # Setup Expected Response
- name = "name3373707"
- expected_response = {"name": name}
- expected_response = document_pb2.Document(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- document = {}
- update_mask = {}
-
- response = client.update_document(document, update_mask)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.UpdateDocumentRequest(
- document=document, update_mask=update_mask
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_update_document_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- document = {}
- update_mask = {}
-
- with pytest.raises(CustomException):
- client.update_document(document, update_mask)
-
- def test_delete_document(self):
- channel = ChannelStub()
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- name = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- client.delete_document(name)
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.DeleteDocumentRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_delete_document_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- name = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- with pytest.raises(CustomException):
- client.delete_document(name)
-
- def test_batch_get_documents(self):
- # Setup Expected Response
- missing = "missing1069449574"
- transaction = b"-34"
- expected_response = {"missing": missing, "transaction": transaction}
- expected_response = firestore_pb2.BatchGetDocumentsResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[iter([expected_response])])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- documents = []
-
- response = client.batch_get_documents(database, documents)
- resources = list(response)
- assert len(resources) == 1
- assert expected_response == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.BatchGetDocumentsRequest(
- database=database, documents=documents
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_batch_get_documents_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- documents = []
-
- with pytest.raises(CustomException):
- client.batch_get_documents(database, documents)
-
- def test_begin_transaction(self):
- # Setup Expected Response
- transaction = b"-34"
- expected_response = {"transaction": transaction}
- expected_response = firestore_pb2.BeginTransactionResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
-
- response = client.begin_transaction(database)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.BeginTransactionRequest(database=database)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_begin_transaction_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
-
- with pytest.raises(CustomException):
- client.begin_transaction(database)
-
- def test_commit(self):
- # Setup Expected Response
- expected_response = {}
- expected_response = firestore_pb2.CommitResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- writes = []
-
- response = client.commit(database, writes)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.CommitRequest(database=database, writes=writes)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_commit_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- writes = []
-
- with pytest.raises(CustomException):
- client.commit(database, writes)
-
- def test_rollback(self):
- channel = ChannelStub()
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- transaction = b"-34"
-
- client.rollback(database, transaction)
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.RollbackRequest(
- database=database, transaction=transaction
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_rollback_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- transaction = b"-34"
-
- with pytest.raises(CustomException):
- client.rollback(database, transaction)
-
- def test_run_query(self):
- # Setup Expected Response
- transaction = b"-34"
- skipped_results = 880286183
- expected_response = {
- "transaction": transaction,
- "skipped_results": skipped_results,
- }
- expected_response = firestore_pb2.RunQueryResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[iter([expected_response])])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- response = client.run_query(parent)
- resources = list(response)
- assert len(resources) == 1
- assert expected_response == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.RunQueryRequest(parent=parent)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_run_query_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- with pytest.raises(CustomException):
- client.run_query(parent)
-
- def test_write(self):
- # Setup Expected Response
- stream_id = "streamId-315624902"
- stream_token = b"122"
- expected_response = {"stream_id": stream_id, "stream_token": stream_token}
- expected_response = firestore_pb2.WriteResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[iter([expected_response])])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- request = {"database": database}
- request = firestore_pb2.WriteRequest(**request)
- requests = [request]
-
- response = client.write(requests)
- resources = list(response)
- assert len(resources) == 1
- assert expected_response == resources[0]
-
- assert len(channel.requests) == 1
- actual_requests = channel.requests[0][1]
- assert len(actual_requests) == 1
- actual_request = list(actual_requests)[0]
- assert request == actual_request
-
- def test_write_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- request = {"database": database}
-
- request = firestore_pb2.WriteRequest(**request)
- requests = [request]
-
- with pytest.raises(CustomException):
- client.write(requests)
-
- def test_listen(self):
- # Setup Expected Response
- expected_response = {}
- expected_response = firestore_pb2.ListenResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[iter([expected_response])])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- request = {"database": database}
- request = firestore_pb2.ListenRequest(**request)
- requests = [request]
-
- response = client.listen(requests)
- resources = list(response)
- assert len(resources) == 1
- assert expected_response == resources[0]
-
- assert len(channel.requests) == 1
- actual_requests = channel.requests[0][1]
- assert len(actual_requests) == 1
- actual_request = list(actual_requests)[0]
- assert request == actual_request
-
- def test_listen_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- request = {"database": database}
-
- request = firestore_pb2.ListenRequest(**request)
- requests = [request]
-
- with pytest.raises(CustomException):
- client.listen(requests)
-
- def test_list_collection_ids(self):
- # Setup Expected Response
- next_page_token = ""
- collection_ids_element = "collectionIdsElement1368994900"
- collection_ids = [collection_ids_element]
- expected_response = {
- "next_page_token": next_page_token,
- "collection_ids": collection_ids,
- }
- expected_response = firestore_pb2.ListCollectionIdsResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- paged_list_response = client.list_collection_ids(parent)
- resources = list(paged_list_response)
- assert len(resources) == 1
-
- assert expected_response.collection_ids[0] == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.ListCollectionIdsRequest(parent=parent)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_list_collection_ids_exception(self):
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- paged_list_response = client.list_collection_ids(parent)
- with pytest.raises(CustomException):
- list(paged_list_response)
diff --git a/tests/unit/gapic/v1beta1/test_firestore_client_v1beta1.py b/tests/unit/gapic/v1beta1/test_firestore_client_v1beta1.py
deleted file mode 100644
index f7bf05814d..0000000000
--- a/tests/unit/gapic/v1beta1/test_firestore_client_v1beta1.py
+++ /dev/null
@@ -1,646 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Unit tests."""
-
-import mock
-import pytest
-
-from google.cloud.firestore_v1beta1.gapic import firestore_client
-from google.cloud.firestore_v1beta1.proto import common_pb2
-from google.cloud.firestore_v1beta1.proto import document_pb2
-from google.cloud.firestore_v1beta1.proto import firestore_pb2
-from google.protobuf import empty_pb2
-
-
-class MultiCallableStub(object):
- """Stub for the grpc.UnaryUnaryMultiCallable interface."""
-
- def __init__(self, method, channel_stub):
- self.method = method
- self.channel_stub = channel_stub
-
- def __call__(self, request, timeout=None, metadata=None, credentials=None):
- self.channel_stub.requests.append((self.method, request))
-
- response = None
- if self.channel_stub.responses:
- response = self.channel_stub.responses.pop()
-
- if isinstance(response, Exception):
- raise response
-
- if response:
- return response
-
-
-class ChannelStub(object):
- """Stub for the grpc.Channel interface."""
-
- def __init__(self, responses=[]):
- self.responses = responses
- self.requests = []
-
- def unary_unary(self, method, request_serializer=None, response_deserializer=None):
- return MultiCallableStub(method, self)
-
- def unary_stream(self, method, request_serializer=None, response_deserializer=None):
- return MultiCallableStub(method, self)
-
- def stream_stream(
- self, method, request_serializer=None, response_deserializer=None
- ):
- return MultiCallableStub(method, self)
-
-
-class CustomException(Exception):
- pass
-
-
-class TestFirestoreClient(object):
- def test_get_document(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- expected_response = {"name": name_2}
- expected_response = document_pb2.Document(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- name = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- response = client.get_document(name)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.GetDocumentRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_get_document_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- name = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- with pytest.raises(CustomException):
- client.get_document(name)
-
- def test_list_documents(self):
- # Setup Expected Response
- next_page_token = ""
- documents_element = {}
- documents = [documents_element]
- expected_response = {"next_page_token": next_page_token, "documents": documents}
- expected_response = firestore_pb2.ListDocumentsResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
- collection_id = "collectionId-821242276"
-
- paged_list_response = client.list_documents(parent, collection_id)
- resources = list(paged_list_response)
- assert len(resources) == 1
-
- assert expected_response.documents[0] == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.ListDocumentsRequest(
- parent=parent, collection_id=collection_id
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_list_documents_exception(self):
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
- collection_id = "collectionId-821242276"
-
- paged_list_response = client.list_documents(parent, collection_id)
- with pytest.raises(CustomException):
- list(paged_list_response)
-
- def test_create_document(self):
- # Setup Expected Response
- name = "name3373707"
- expected_response = {"name": name}
- expected_response = document_pb2.Document(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
- collection_id = "collectionId-821242276"
- document_id = "documentId506676927"
- document = {}
-
- response = client.create_document(parent, collection_id, document_id, document)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.CreateDocumentRequest(
- parent=parent,
- collection_id=collection_id,
- document_id=document_id,
- document=document,
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_create_document_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
- collection_id = "collectionId-821242276"
- document_id = "documentId506676927"
- document = {}
-
- with pytest.raises(CustomException):
- client.create_document(parent, collection_id, document_id, document)
-
- def test_update_document(self):
- # Setup Expected Response
- name = "name3373707"
- expected_response = {"name": name}
- expected_response = document_pb2.Document(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- document = {}
- update_mask = {}
-
- response = client.update_document(document, update_mask)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.UpdateDocumentRequest(
- document=document, update_mask=update_mask
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_update_document_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- document = {}
- update_mask = {}
-
- with pytest.raises(CustomException):
- client.update_document(document, update_mask)
-
- def test_delete_document(self):
- channel = ChannelStub()
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- name = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- client.delete_document(name)
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.DeleteDocumentRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_delete_document_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- name = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- with pytest.raises(CustomException):
- client.delete_document(name)
-
- def test_batch_get_documents(self):
- # Setup Expected Response
- missing = "missing1069449574"
- transaction = b"-34"
- expected_response = {"missing": missing, "transaction": transaction}
- expected_response = firestore_pb2.BatchGetDocumentsResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[iter([expected_response])])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- documents = []
-
- response = client.batch_get_documents(database, documents)
- resources = list(response)
- assert len(resources) == 1
- assert expected_response == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.BatchGetDocumentsRequest(
- database=database, documents=documents
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_batch_get_documents_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- documents = []
-
- with pytest.raises(CustomException):
- client.batch_get_documents(database, documents)
-
- def test_begin_transaction(self):
- # Setup Expected Response
- transaction = b"-34"
- expected_response = {"transaction": transaction}
- expected_response = firestore_pb2.BeginTransactionResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
-
- response = client.begin_transaction(database)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.BeginTransactionRequest(database=database)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_begin_transaction_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
-
- with pytest.raises(CustomException):
- client.begin_transaction(database)
-
- def test_commit(self):
- # Setup Expected Response
- expected_response = {}
- expected_response = firestore_pb2.CommitResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- writes = []
-
- response = client.commit(database, writes)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.CommitRequest(database=database, writes=writes)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_commit_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- writes = []
-
- with pytest.raises(CustomException):
- client.commit(database, writes)
-
- def test_rollback(self):
- channel = ChannelStub()
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- transaction = b"-34"
-
- client.rollback(database, transaction)
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.RollbackRequest(
- database=database, transaction=transaction
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_rollback_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- transaction = b"-34"
-
- with pytest.raises(CustomException):
- client.rollback(database, transaction)
-
- def test_run_query(self):
- # Setup Expected Response
- transaction = b"-34"
- skipped_results = 880286183
- expected_response = {
- "transaction": transaction,
- "skipped_results": skipped_results,
- }
- expected_response = firestore_pb2.RunQueryResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[iter([expected_response])])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- response = client.run_query(parent)
- resources = list(response)
- assert len(resources) == 1
- assert expected_response == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.RunQueryRequest(parent=parent)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_run_query_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- with pytest.raises(CustomException):
- client.run_query(parent)
-
- def test_write(self):
- # Setup Expected Response
- stream_id = "streamId-315624902"
- stream_token = b"122"
- expected_response = {"stream_id": stream_id, "stream_token": stream_token}
- expected_response = firestore_pb2.WriteResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[iter([expected_response])])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- request = {"database": database}
- request = firestore_pb2.WriteRequest(**request)
- requests = [request]
-
- response = client.write(requests)
- resources = list(response)
- assert len(resources) == 1
- assert expected_response == resources[0]
-
- assert len(channel.requests) == 1
- actual_requests = channel.requests[0][1]
- assert len(actual_requests) == 1
- actual_request = list(actual_requests)[0]
- assert request == actual_request
-
- def test_write_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- request = {"database": database}
-
- request = firestore_pb2.WriteRequest(**request)
- requests = [request]
-
- with pytest.raises(CustomException):
- client.write(requests)
-
- def test_listen(self):
- # Setup Expected Response
- expected_response = {}
- expected_response = firestore_pb2.ListenResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[iter([expected_response])])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- request = {"database": database}
- request = firestore_pb2.ListenRequest(**request)
- requests = [request]
-
- response = client.listen(requests)
- resources = list(response)
- assert len(resources) == 1
- assert expected_response == resources[0]
-
- assert len(channel.requests) == 1
- actual_requests = channel.requests[0][1]
- assert len(actual_requests) == 1
- actual_request = list(actual_requests)[0]
- assert request == actual_request
-
- def test_listen_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- database = client.database_root_path("[PROJECT]", "[DATABASE]")
- request = {"database": database}
-
- request = firestore_pb2.ListenRequest(**request)
- requests = [request]
-
- with pytest.raises(CustomException):
- client.listen(requests)
-
- def test_list_collection_ids(self):
- # Setup Expected Response
- next_page_token = ""
- collection_ids_element = "collectionIdsElement1368994900"
- collection_ids = [collection_ids_element]
- expected_response = {
- "next_page_token": next_page_token,
- "collection_ids": collection_ids,
- }
- expected_response = firestore_pb2.ListCollectionIdsResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup Request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- paged_list_response = client.list_collection_ids(parent)
- resources = list(paged_list_response)
- assert len(resources) == 1
-
- assert expected_response.collection_ids[0] == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = firestore_pb2.ListCollectionIdsRequest(parent=parent)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_list_collection_ids_exception(self):
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = firestore_client.FirestoreClient()
-
- # Setup request
- parent = client.any_path_path(
- "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]"
- )
-
- paged_list_response = client.list_collection_ids(parent)
- with pytest.raises(CustomException):
- list(paged_list_response)
diff --git a/tests/unit/v1/conformance_tests.py b/tests/unit/v1/conformance_tests.py
new file mode 100644
index 0000000000..0718f8e5f4
--- /dev/null
+++ b/tests/unit/v1/conformance_tests.py
@@ -0,0 +1,531 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.cloud.firestore_v1.types import common
+from google.cloud.firestore_v1.types import document
+from google.cloud.firestore_v1.types import firestore
+from google.cloud.firestore_v1.types import query as gcf_query
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="tests.unit.v1",
+ manifest={
+ "TestFile",
+ "Test",
+ "GetTest",
+ "CreateTest",
+ "SetTest",
+ "UpdateTest",
+ "UpdatePathsTest",
+ "DeleteTest",
+ "SetOption",
+ "QueryTest",
+ "Clause",
+ "Select",
+ "Where",
+ "OrderBy",
+ "Cursor_",
+ "DocSnapshot",
+ "FieldPath",
+ "ListenTest",
+ "Snapshot",
+ "DocChange",
+ },
+)
+
+
+class TestFile(proto.Message):
+ r"""A collection of tests.
+
+ Attributes:
+ tests (Sequence[~.gcf_tests.Test]):
+
+ """
+
+ tests = proto.RepeatedField(proto.MESSAGE, number=1, message="Test",)
+
+
+class Test(proto.Message):
+ r"""A Test describes a single client method call and its expected
+ result.
+
+ Attributes:
+ description (str):
+ short description of the test
+ comment (str):
+ a comment describing the behavior being
+ tested
+ get (~.gcf_tests.GetTest):
+
+ create (~.gcf_tests.CreateTest):
+
+ set_ (~.gcf_tests.SetTest):
+
+ update (~.gcf_tests.UpdateTest):
+
+ update_paths (~.gcf_tests.UpdatePathsTest):
+
+ delete (~.gcf_tests.DeleteTest):
+
+ query (~.gcf_tests.QueryTest):
+
+ listen (~.gcf_tests.ListenTest):
+
+ """
+
+ description = proto.Field(proto.STRING, number=1)
+
+ comment = proto.Field(proto.STRING, number=10)
+
+ get = proto.Field(proto.MESSAGE, number=2, oneof="test", message="GetTest",)
+
+ create = proto.Field(proto.MESSAGE, number=3, oneof="test", message="CreateTest",)
+
+ set_ = proto.Field(proto.MESSAGE, number=4, oneof="test", message="SetTest",)
+
+ update = proto.Field(proto.MESSAGE, number=5, oneof="test", message="UpdateTest",)
+
+ update_paths = proto.Field(
+ proto.MESSAGE, number=6, oneof="test", message="UpdatePathsTest",
+ )
+
+ delete = proto.Field(proto.MESSAGE, number=7, oneof="test", message="DeleteTest",)
+
+ query = proto.Field(proto.MESSAGE, number=8, oneof="test", message="QueryTest",)
+
+ listen = proto.Field(proto.MESSAGE, number=9, oneof="test", message="ListenTest",)
+
+
+class GetTest(proto.Message):
+ r"""Call to the DocumentRef.Get method.
+
+ Attributes:
+ doc_ref_path (str):
+ The path of the doc, e.g.
+ "projects/projectID/databases/(default)/documents/C/d".
+ request (~.firestore.GetDocumentRequest):
+ The request that the call should send to the
+ Firestore service.
+ """
+
+ doc_ref_path = proto.Field(proto.STRING, number=1)
+
+ request = proto.Field(
+ proto.MESSAGE, number=2, message=firestore.GetDocumentRequest,
+ )
+
+
+class CreateTest(proto.Message):
+ r"""Call to DocumentRef.Create.
+
+ Attributes:
+ doc_ref_path (str):
+ The path of the doc, e.g.
+ "projects/projectID/databases/(default)/documents/C/d".
+ json_data (str):
+ The data passed to Create, as JSON. The
+ strings "Delete" and "ServerTimestamp" denote
+ the two special sentinel values. Values that
+ could be interpreted as integers (i.e. digit
+ strings) should be treated as integers.
+ request (~.firestore.CommitRequest):
+ The request that the call should generate.
+ is_error (bool):
+ If true, the call should result in an error
+ without generating a request. If this is true,
+ request should not be set.
+ """
+
+ doc_ref_path = proto.Field(proto.STRING, number=1)
+
+ json_data = proto.Field(proto.STRING, number=2)
+
+ request = proto.Field(proto.MESSAGE, number=3, message=firestore.CommitRequest,)
+
+ is_error = proto.Field(proto.BOOL, number=4)
+
+
+class SetTest(proto.Message):
+ r"""A call to DocumentRef.Set.
+
+ Attributes:
+ doc_ref_path (str):
+ path of doc
+ option (~.gcf_tests.SetOption):
+ option to the Set call, if any
+ json_data (str):
+ data (see CreateTest.json_data)
+ request (~.firestore.CommitRequest):
+ expected request
+ is_error (bool):
+ call signals an error
+ """
+
+ doc_ref_path = proto.Field(proto.STRING, number=1)
+
+ option = proto.Field(proto.MESSAGE, number=2, message="SetOption",)
+
+ json_data = proto.Field(proto.STRING, number=3)
+
+ request = proto.Field(proto.MESSAGE, number=4, message=firestore.CommitRequest,)
+
+ is_error = proto.Field(proto.BOOL, number=5)
+
+
+class UpdateTest(proto.Message):
+ r"""A call to the form of DocumentRef.Update that represents the
+ data as a map or dictionary.
+
+ Attributes:
+ doc_ref_path (str):
+ path of doc
+ precondition (~.common.Precondition):
+ precondition in call, if any
+ json_data (str):
+ data (see CreateTest.json_data)
+ request (~.firestore.CommitRequest):
+ expected request
+ is_error (bool):
+ call signals an error
+ """
+
+ doc_ref_path = proto.Field(proto.STRING, number=1)
+
+ precondition = proto.Field(proto.MESSAGE, number=2, message=common.Precondition,)
+
+ json_data = proto.Field(proto.STRING, number=3)
+
+ request = proto.Field(proto.MESSAGE, number=4, message=firestore.CommitRequest,)
+
+ is_error = proto.Field(proto.BOOL, number=5)
+
+
+class UpdatePathsTest(proto.Message):
+ r"""A call to the form of DocumentRef.Update that represents the
+ data as a list of field paths and their values.
+
+ Attributes:
+ doc_ref_path (str):
+ path of doc
+ precondition (~.common.Precondition):
+ precondition in call, if any
+ field_paths (Sequence[~.gcf_tests.FieldPath]):
+ parallel sequences: field_paths[i] corresponds to
+ json_values[i]
+ json_values (Sequence[str]):
+ the argument values, as JSON
+ request (~.firestore.CommitRequest):
+            expected request
+ is_error (bool):
+ call signals an error
+ """
+
+ doc_ref_path = proto.Field(proto.STRING, number=1)
+
+ precondition = proto.Field(proto.MESSAGE, number=2, message=common.Precondition,)
+
+ field_paths = proto.RepeatedField(proto.MESSAGE, number=3, message="FieldPath",)
+
+ json_values = proto.RepeatedField(proto.STRING, number=4)
+
+ request = proto.Field(proto.MESSAGE, number=5, message=firestore.CommitRequest,)
+
+ is_error = proto.Field(proto.BOOL, number=6)
+
+
+class DeleteTest(proto.Message):
+    r"""A call to DocumentRef.Delete
+
+ Attributes:
+ doc_ref_path (str):
+ path of doc
+ precondition (~.common.Precondition):
+
+ request (~.firestore.CommitRequest):
+            expected request
+ is_error (bool):
+ call signals an error
+ """
+
+ doc_ref_path = proto.Field(proto.STRING, number=1)
+
+ precondition = proto.Field(proto.MESSAGE, number=2, message=common.Precondition,)
+
+ request = proto.Field(proto.MESSAGE, number=3, message=firestore.CommitRequest,)
+
+ is_error = proto.Field(proto.BOOL, number=4)
+
+
+class SetOption(proto.Message):
+ r"""An option to the DocumentRef.Set call.
+
+ Attributes:
+ all_ (bool):
+ if true, merge all fields ("fields" is
+ ignored).
+ fields (Sequence[~.gcf_tests.FieldPath]):
+ field paths for a Merge option
+ """
+
+ all_ = proto.Field(proto.BOOL, number=1)
+
+ fields = proto.RepeatedField(proto.MESSAGE, number=2, message="FieldPath",)
+
+
+class QueryTest(proto.Message):
+ r"""
+
+ Attributes:
+ coll_path (str):
+ path of collection, e.g.
+ "projects/projectID/databases/(default)/documents/C".
+ clauses (Sequence[~.gcf_tests.Clause]):
+
+ query (~.gcf_query.StructuredQuery):
+
+ is_error (bool):
+
+ """
+
+ coll_path = proto.Field(proto.STRING, number=1)
+
+ clauses = proto.RepeatedField(proto.MESSAGE, number=2, message="Clause",)
+
+ query = proto.Field(proto.MESSAGE, number=3, message=gcf_query.StructuredQuery,)
+
+ is_error = proto.Field(proto.BOOL, number=4)
+
+
+class Clause(proto.Message):
+ r"""
+
+ Attributes:
+ select (~.gcf_tests.Select):
+
+ where (~.gcf_tests.Where):
+
+ order_by (~.gcf_tests.OrderBy):
+
+ offset (int):
+
+ limit (int):
+
+ start_at (~.gcf_tests.Cursor_):
+
+ start_after (~.gcf_tests.Cursor_):
+
+ end_at (~.gcf_tests.Cursor_):
+
+ end_before (~.gcf_tests.Cursor_):
+
+ """
+
+ select = proto.Field(proto.MESSAGE, number=1, oneof="clause", message="Select",)
+
+ where = proto.Field(proto.MESSAGE, number=2, oneof="clause", message="Where",)
+
+ order_by = proto.Field(proto.MESSAGE, number=3, oneof="clause", message="OrderBy",)
+
+ offset = proto.Field(proto.INT32, number=4, oneof="clause")
+
+ limit = proto.Field(proto.INT32, number=5, oneof="clause")
+
+ start_at = proto.Field(proto.MESSAGE, number=6, oneof="clause", message="Cursor_",)
+
+ start_after = proto.Field(
+ proto.MESSAGE, number=7, oneof="clause", message="Cursor_",
+ )
+
+ end_at = proto.Field(proto.MESSAGE, number=8, oneof="clause", message="Cursor_",)
+
+ end_before = proto.Field(
+ proto.MESSAGE, number=9, oneof="clause", message="Cursor_",
+ )
+
+
+class Select(proto.Message):
+ r"""
+
+ Attributes:
+ fields (Sequence[~.gcf_tests.FieldPath]):
+
+ """
+
+ fields = proto.RepeatedField(proto.MESSAGE, number=1, message="FieldPath",)
+
+
+class Where(proto.Message):
+ r"""
+
+ Attributes:
+ path (~.gcf_tests.FieldPath):
+
+ op (str):
+
+ json_value (str):
+
+ """
+
+ path = proto.Field(proto.MESSAGE, number=1, message="FieldPath",)
+
+ op = proto.Field(proto.STRING, number=2)
+
+ json_value = proto.Field(proto.STRING, number=3)
+
+
+class OrderBy(proto.Message):
+ r"""
+
+ Attributes:
+ path (~.gcf_tests.FieldPath):
+
+ direction (str):
+ "asc" or "desc".
+ """
+
+ path = proto.Field(proto.MESSAGE, number=1, message="FieldPath",)
+
+ direction = proto.Field(proto.STRING, number=2)
+
+
+class Cursor_(proto.Message):
+ r"""
+
+ Attributes:
+ doc_snapshot (~.gcf_tests.DocSnapshot):
+ one of:
+ json_values (Sequence[str]):
+
+ """
+
+ doc_snapshot = proto.Field(proto.MESSAGE, number=1, message="DocSnapshot",)
+
+ json_values = proto.RepeatedField(proto.STRING, number=2)
+
+
+class DocSnapshot(proto.Message):
+ r"""
+
+ Attributes:
+ path (str):
+
+ json_data (str):
+
+ """
+
+ path = proto.Field(proto.STRING, number=1)
+
+ json_data = proto.Field(proto.STRING, number=2)
+
+
+class FieldPath(proto.Message):
+ r"""
+
+ Attributes:
+ field (Sequence[str]):
+
+ """
+
+ field = proto.RepeatedField(proto.STRING, number=1)
+
+
+class ListenTest(proto.Message):
+    r"""A test of the Listen streaming RPC (a.k.a. Firestore watch). If the
+ sequence of responses is provided to the implementation, it should
+ produce the sequence of snapshots. If is_error is true, an error
+ should occur after the snapshots.
+
+ The tests assume that the query is
+ Collection("projects/projectID/databases/(default)/documents/C").OrderBy("a",
+ Ascending)
+
+ The watch target ID used in these tests is 1. Test interpreters
+ should either change their client's ID for testing, or change the ID
+ in the tests before running them.
+
+ Attributes:
+ responses (Sequence[~.firestore.ListenResponse]):
+
+ snapshots (Sequence[~.gcf_tests.Snapshot]):
+
+ is_error (bool):
+
+ """
+
+ responses = proto.RepeatedField(
+ proto.MESSAGE, number=1, message=firestore.ListenResponse,
+ )
+
+ snapshots = proto.RepeatedField(proto.MESSAGE, number=2, message="Snapshot",)
+
+ is_error = proto.Field(proto.BOOL, number=3)
+
+
+class Snapshot(proto.Message):
+ r"""
+
+ Attributes:
+ docs (Sequence[~.document.Document]):
+
+ changes (Sequence[~.gcf_tests.DocChange]):
+
+ read_time (~.timestamp.Timestamp):
+
+ """
+
+ docs = proto.RepeatedField(proto.MESSAGE, number=1, message=document.Document,)
+
+ changes = proto.RepeatedField(proto.MESSAGE, number=2, message="DocChange",)
+
+ read_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)
+
+
+class DocChange(proto.Message):
+ r"""
+
+ Attributes:
+ kind (~.gcf_tests.DocChange.Kind):
+
+ doc (~.document.Document):
+
+ old_index (int):
+
+ new_index (int):
+
+ """
+
+ class Kind(proto.Enum):
+ r""""""
+ KIND_UNSPECIFIED = 0
+ ADDED = 1
+ REMOVED = 2
+ MODIFIED = 3
+
+ kind = proto.Field(proto.ENUM, number=1, enum=Kind,)
+
+ doc = proto.Field(proto.MESSAGE, number=2, message=document.Document,)
+
+ old_index = proto.Field(proto.INT32, number=3)
+
+ new_index = proto.Field(proto.INT32, number=4)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/tests/unit/v1/test__helpers.py b/tests/unit/v1/test__helpers.py
index e804d9bfcb..5c4c459dbb 100644
--- a/tests/unit/v1/test__helpers.py
+++ b/tests/unit/v1/test__helpers.py
@@ -20,6 +20,20 @@
import mock
+class AsyncMock(mock.MagicMock):
+ async def __call__(self, *args, **kwargs):
+ return super(AsyncMock, self).__call__(*args, **kwargs)
+
+
+class AsyncIter:
+ def __init__(self, items):
+ self.items = items
+
+ async def __aiter__(self, **_):
+ for i in self.items:
+ yield i
+
+
class TestGeoPoint(unittest.TestCase):
@staticmethod
def _get_target_class():
@@ -219,7 +233,7 @@ def test_geo_point(self):
self.assertEqual(result, expected)
def test_array(self):
- from google.cloud.firestore_v1.proto.document_pb2 import ArrayValue
+ from google.cloud.firestore_v1.types.document import ArrayValue
result = self._call_fut([99, True, 118.5])
@@ -234,7 +248,7 @@ def test_array(self):
self.assertEqual(result, expected)
def test_map(self):
- from google.cloud.firestore_v1.proto.document_pb2 import MapValue
+ from google.cloud.firestore_v1.types.document import MapValue
result = self._call_fut({"abc": 285, "def": b"piglatin"})
@@ -263,8 +277,8 @@ def _call_fut(values_dict):
def test_many_types(self):
from google.protobuf import struct_pb2
from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1.proto.document_pb2 import ArrayValue
- from google.cloud.firestore_v1.proto.document_pb2 import MapValue
+ from google.cloud.firestore_v1.types.document import ArrayValue
+ from google.cloud.firestore_v1.types.document import MapValue
dt_seconds = 1497397225
dt_nanos = 465964000
@@ -444,12 +458,12 @@ def test_geo_point(self):
self.assertEqual(self._call_fut(value), geo_pt)
def test_array(self):
- from google.cloud.firestore_v1.proto import document_pb2
+ from google.cloud.firestore_v1.types import document
sub_value1 = _value_pb(boolean_value=True)
sub_value2 = _value_pb(double_value=14.1396484375)
sub_value3 = _value_pb(bytes_value=b"\xde\xad\xbe\xef")
- array_pb = document_pb2.ArrayValue(values=[sub_value1, sub_value2, sub_value3])
+ array_pb = document.ArrayValue(values=[sub_value1, sub_value2, sub_value3])
value = _value_pb(array_value=array_pb)
expected = [
@@ -460,13 +474,11 @@ def test_array(self):
self.assertEqual(self._call_fut(value), expected)
def test_map(self):
- from google.cloud.firestore_v1.proto import document_pb2
+ from google.cloud.firestore_v1.types import document
sub_value1 = _value_pb(integer_value=187680)
sub_value2 = _value_pb(string_value=u"how low can you go?")
- map_pb = document_pb2.MapValue(
- fields={"first": sub_value1, "second": sub_value2}
- )
+ map_pb = document.MapValue(fields={"first": sub_value1, "second": sub_value2})
value = _value_pb(map_value=map_pb)
expected = {
@@ -476,24 +488,24 @@ def test_map(self):
self.assertEqual(self._call_fut(value), expected)
def test_nested_map(self):
- from google.cloud.firestore_v1.proto import document_pb2
+ from google.cloud.firestore_v1.types import document
actual_value1 = 1009876
actual_value2 = u"hey you guys"
actual_value3 = 90.875
- map_pb1 = document_pb2.MapValue(
+ map_pb1 = document.MapValue(
fields={
"lowest": _value_pb(integer_value=actual_value1),
"aside": _value_pb(string_value=actual_value2),
}
)
- map_pb2 = document_pb2.MapValue(
+ map_pb2 = document.MapValue(
fields={
"middle": _value_pb(map_value=map_pb1),
"aside": _value_pb(boolean_value=True),
}
)
- map_pb3 = document_pb2.MapValue(
+ map_pb3 = document.MapValue(
fields={
"highest": _value_pb(map_value=map_pb2),
"aside": _value_pb(double_value=actual_value3),
@@ -515,13 +527,13 @@ def test_unset_value_type(self):
self._call_fut(_value_pb())
def test_unknown_value_type(self):
- value_pb = mock.Mock(spec=["WhichOneof"])
- value_pb.WhichOneof.return_value = "zoob_value"
+ value_pb = mock.Mock()
+ value_pb._pb.WhichOneof.return_value = "zoob_value"
with self.assertRaises(ValueError):
self._call_fut(value_pb)
- value_pb.WhichOneof.assert_called_once_with("value_type")
+ value_pb._pb.WhichOneof.assert_called_once_with("value_type")
class Test_decode_dict(unittest.TestCase):
@@ -537,8 +549,8 @@ def _call_fut(value_fields, client=mock.sentinel.client):
def test_many_types(self):
from google.protobuf import struct_pb2
from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1.proto.document_pb2 import ArrayValue
- from google.cloud.firestore_v1.proto.document_pb2 import MapValue
+ from google.cloud.firestore_v1.types.document import ArrayValue
+ from google.cloud.firestore_v1.types.document import MapValue
from google.cloud._helpers import UTC
from google.cloud.firestore_v1.field_path import FieldPath
@@ -612,24 +624,24 @@ def _dummy_ref_string(collection_id):
)
def test_success(self):
- from google.cloud.firestore_v1.proto import document_pb2
+ from google.cloud.firestore_v1.types import document
prefix = self._dummy_ref_string("sub-collection")
actual_id = "this-is-the-one"
name = "{}/{}".format(prefix, actual_id)
- document_pb = document_pb2.Document(name=name)
+ document_pb = document.Document(name=name)
document_id = self._call_fut(document_pb, prefix)
self.assertEqual(document_id, actual_id)
def test_failure(self):
- from google.cloud.firestore_v1.proto import document_pb2
+ from google.cloud.firestore_v1.types import document
actual_prefix = self._dummy_ref_string("the-right-one")
wrong_prefix = self._dummy_ref_string("the-wrong-one")
name = "{}/{}".format(actual_prefix, "sorry-wont-works")
- document_pb = document_pb2.Document(name=name)
+ document_pb = document.Document(name=name)
with self.assertRaises(ValueError) as exc_info:
self._call_fut(document_pb, wrong_prefix)
@@ -1225,7 +1237,7 @@ def test_ctor_w_normal_value_nested(self):
self.assertFalse(inst.has_transforms)
def test_get_update_pb_w_exists_precondition(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
document_data = {}
inst = self._make_one(document_data)
@@ -1235,14 +1247,14 @@ def test_get_update_pb_w_exists_precondition(self):
update_pb = inst.get_update_pb(document_path, exists=False)
- self.assertIsInstance(update_pb, write_pb2.Write)
+ self.assertIsInstance(update_pb, write.Write)
self.assertEqual(update_pb.update.name, document_path)
self.assertEqual(update_pb.update.fields, document_data)
- self.assertTrue(update_pb.HasField("current_document"))
+ self.assertTrue(update_pb._pb.HasField("current_document"))
self.assertFalse(update_pb.current_document.exists)
def test_get_update_pb_wo_exists_precondition(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1._helpers import encode_dict
document_data = {"a": 1}
@@ -1253,13 +1265,45 @@ def test_get_update_pb_wo_exists_precondition(self):
update_pb = inst.get_update_pb(document_path)
- self.assertIsInstance(update_pb, write_pb2.Write)
+ self.assertIsInstance(update_pb, write.Write)
self.assertEqual(update_pb.update.name, document_path)
self.assertEqual(update_pb.update.fields, encode_dict(document_data))
- self.assertFalse(update_pb.HasField("current_document"))
+ self.assertFalse(update_pb._pb.HasField("current_document"))
+
+ def test_get_field_transform_pbs_miss(self):
+ document_data = {"a": 1}
+ inst = self._make_one(document_data)
+ document_path = (
+ "projects/project-id/databases/(default)/" "documents/document-id"
+ )
+
+ field_transform_pbs = inst.get_field_transform_pbs(document_path)
+
+ self.assertEqual(field_transform_pbs, [])
+
+ def test_get_field_transform_pbs_w_server_timestamp(self):
+ from google.cloud.firestore_v1.types import write
+ from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
+ from google.cloud.firestore_v1._helpers import REQUEST_TIME_ENUM
+
+ document_data = {"a": SERVER_TIMESTAMP}
+ inst = self._make_one(document_data)
+ document_path = (
+ "projects/project-id/databases/(default)/" "documents/document-id"
+ )
+
+ field_transform_pbs = inst.get_field_transform_pbs(document_path)
+
+ self.assertEqual(len(field_transform_pbs), 1)
+ field_transform_pb = field_transform_pbs[0]
+ self.assertIsInstance(
+ field_transform_pb, write.DocumentTransform.FieldTransform
+ )
+ self.assertEqual(field_transform_pb.field_path, "a")
+ self.assertEqual(field_transform_pb.set_to_server_value, REQUEST_TIME_ENUM)
def test_get_transform_pb_w_server_timestamp_w_exists_precondition(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
from google.cloud.firestore_v1._helpers import REQUEST_TIME_ENUM
@@ -1271,18 +1315,18 @@ def test_get_transform_pb_w_server_timestamp_w_exists_precondition(self):
transform_pb = inst.get_transform_pb(document_path, exists=False)
- self.assertIsInstance(transform_pb, write_pb2.Write)
+ self.assertIsInstance(transform_pb, write.Write)
self.assertEqual(transform_pb.transform.document, document_path)
transforms = transform_pb.transform.field_transforms
self.assertEqual(len(transforms), 1)
transform = transforms[0]
self.assertEqual(transform.field_path, "a")
self.assertEqual(transform.set_to_server_value, REQUEST_TIME_ENUM)
- self.assertTrue(transform_pb.HasField("current_document"))
+ self.assertTrue(transform_pb._pb.HasField("current_document"))
self.assertFalse(transform_pb.current_document.exists)
def test_get_transform_pb_w_server_timestamp_wo_exists_precondition(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
from google.cloud.firestore_v1._helpers import REQUEST_TIME_ENUM
@@ -1294,14 +1338,14 @@ def test_get_transform_pb_w_server_timestamp_wo_exists_precondition(self):
transform_pb = inst.get_transform_pb(document_path)
- self.assertIsInstance(transform_pb, write_pb2.Write)
+ self.assertIsInstance(transform_pb, write.Write)
self.assertEqual(transform_pb.transform.document, document_path)
transforms = transform_pb.transform.field_transforms
self.assertEqual(len(transforms), 1)
transform = transforms[0]
self.assertEqual(transform.field_path, "a.b.c")
self.assertEqual(transform.set_to_server_value, REQUEST_TIME_ENUM)
- self.assertFalse(transform_pb.HasField("current_document"))
+ self.assertFalse(transform_pb._pb.HasField("current_document"))
@staticmethod
def _array_value_to_list(array_value):
@@ -1310,7 +1354,7 @@ def _array_value_to_list(array_value):
return [decode_value(element, client=None) for element in array_value.values]
def test_get_transform_pb_w_array_remove(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transforms import ArrayRemove
values = [2, 4, 8]
@@ -1322,7 +1366,7 @@ def test_get_transform_pb_w_array_remove(self):
transform_pb = inst.get_transform_pb(document_path)
- self.assertIsInstance(transform_pb, write_pb2.Write)
+ self.assertIsInstance(transform_pb, write.Write)
self.assertEqual(transform_pb.transform.document, document_path)
transforms = transform_pb.transform.field_transforms
self.assertEqual(len(transforms), 1)
@@ -1330,10 +1374,10 @@ def test_get_transform_pb_w_array_remove(self):
self.assertEqual(transform.field_path, "a.b.c")
removed = self._array_value_to_list(transform.remove_all_from_array)
self.assertEqual(removed, values)
- self.assertFalse(transform_pb.HasField("current_document"))
+ self.assertFalse(transform_pb._pb.HasField("current_document"))
def test_get_transform_pb_w_array_union(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transforms import ArrayUnion
values = [1, 3, 5]
@@ -1345,7 +1389,7 @@ def test_get_transform_pb_w_array_union(self):
transform_pb = inst.get_transform_pb(document_path)
- self.assertIsInstance(transform_pb, write_pb2.Write)
+ self.assertIsInstance(transform_pb, write.Write)
self.assertEqual(transform_pb.transform.document, document_path)
transforms = transform_pb.transform.field_transforms
self.assertEqual(len(transforms), 1)
@@ -1353,10 +1397,10 @@ def test_get_transform_pb_w_array_union(self):
self.assertEqual(transform.field_path, "a.b.c")
added = self._array_value_to_list(transform.append_missing_elements)
self.assertEqual(added, values)
- self.assertFalse(transform_pb.HasField("current_document"))
+ self.assertFalse(transform_pb._pb.HasField("current_document"))
def test_get_transform_pb_w_increment_int(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transforms import Increment
value = 1
@@ -1368,7 +1412,7 @@ def test_get_transform_pb_w_increment_int(self):
transform_pb = inst.get_transform_pb(document_path)
- self.assertIsInstance(transform_pb, write_pb2.Write)
+ self.assertIsInstance(transform_pb, write.Write)
self.assertEqual(transform_pb.transform.document, document_path)
transforms = transform_pb.transform.field_transforms
self.assertEqual(len(transforms), 1)
@@ -1376,10 +1420,10 @@ def test_get_transform_pb_w_increment_int(self):
self.assertEqual(transform.field_path, "a.b.c")
added = transform.increment.integer_value
self.assertEqual(added, value)
- self.assertFalse(transform_pb.HasField("current_document"))
+ self.assertFalse(transform_pb._pb.HasField("current_document"))
def test_get_transform_pb_w_increment_float(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transforms import Increment
value = 3.1415926
@@ -1391,7 +1435,7 @@ def test_get_transform_pb_w_increment_float(self):
transform_pb = inst.get_transform_pb(document_path)
- self.assertIsInstance(transform_pb, write_pb2.Write)
+ self.assertIsInstance(transform_pb, write.Write)
self.assertEqual(transform_pb.transform.document, document_path)
transforms = transform_pb.transform.field_transforms
self.assertEqual(len(transforms), 1)
@@ -1399,10 +1443,10 @@ def test_get_transform_pb_w_increment_float(self):
self.assertEqual(transform.field_path, "a.b.c")
added = transform.increment.double_value
self.assertEqual(added, value)
- self.assertFalse(transform_pb.HasField("current_document"))
+ self.assertFalse(transform_pb._pb.HasField("current_document"))
def test_get_transform_pb_w_maximum_int(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transforms import Maximum
value = 1
@@ -1414,7 +1458,7 @@ def test_get_transform_pb_w_maximum_int(self):
transform_pb = inst.get_transform_pb(document_path)
- self.assertIsInstance(transform_pb, write_pb2.Write)
+ self.assertIsInstance(transform_pb, write.Write)
self.assertEqual(transform_pb.transform.document, document_path)
transforms = transform_pb.transform.field_transforms
self.assertEqual(len(transforms), 1)
@@ -1422,10 +1466,10 @@ def test_get_transform_pb_w_maximum_int(self):
self.assertEqual(transform.field_path, "a.b.c")
added = transform.maximum.integer_value
self.assertEqual(added, value)
- self.assertFalse(transform_pb.HasField("current_document"))
+ self.assertFalse(transform_pb._pb.HasField("current_document"))
def test_get_transform_pb_w_maximum_float(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transforms import Maximum
value = 3.1415926
@@ -1437,7 +1481,7 @@ def test_get_transform_pb_w_maximum_float(self):
transform_pb = inst.get_transform_pb(document_path)
- self.assertIsInstance(transform_pb, write_pb2.Write)
+ self.assertIsInstance(transform_pb, write.Write)
self.assertEqual(transform_pb.transform.document, document_path)
transforms = transform_pb.transform.field_transforms
self.assertEqual(len(transforms), 1)
@@ -1445,10 +1489,10 @@ def test_get_transform_pb_w_maximum_float(self):
self.assertEqual(transform.field_path, "a.b.c")
added = transform.maximum.double_value
self.assertEqual(added, value)
- self.assertFalse(transform_pb.HasField("current_document"))
+ self.assertFalse(transform_pb._pb.HasField("current_document"))
def test_get_transform_pb_w_minimum_int(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transforms import Minimum
value = 1
@@ -1460,7 +1504,7 @@ def test_get_transform_pb_w_minimum_int(self):
transform_pb = inst.get_transform_pb(document_path)
- self.assertIsInstance(transform_pb, write_pb2.Write)
+ self.assertIsInstance(transform_pb, write.Write)
self.assertEqual(transform_pb.transform.document, document_path)
transforms = transform_pb.transform.field_transforms
self.assertEqual(len(transforms), 1)
@@ -1468,10 +1512,10 @@ def test_get_transform_pb_w_minimum_int(self):
self.assertEqual(transform.field_path, "a.b.c")
added = transform.minimum.integer_value
self.assertEqual(added, value)
- self.assertFalse(transform_pb.HasField("current_document"))
+ self.assertFalse(transform_pb._pb.HasField("current_document"))
def test_get_transform_pb_w_minimum_float(self):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transforms import Minimum
value = 3.1415926
@@ -1483,7 +1527,7 @@ def test_get_transform_pb_w_minimum_float(self):
transform_pb = inst.get_transform_pb(document_path)
- self.assertIsInstance(transform_pb, write_pb2.Write)
+ self.assertIsInstance(transform_pb, write.Write)
self.assertEqual(transform_pb.transform.document, document_path)
transforms = transform_pb.transform.field_transforms
self.assertEqual(len(transforms), 1)
@@ -1491,7 +1535,7 @@ def test_get_transform_pb_w_minimum_float(self):
self.assertEqual(transform.field_path, "a.b.c")
added = transform.minimum.double_value
self.assertEqual(added, value)
- self.assertFalse(transform_pb.HasField("current_document"))
+ self.assertFalse(transform_pb._pb.HasField("current_document"))
class Test_pbs_for_create(unittest.TestCase):
@@ -1503,34 +1547,27 @@ def _call_fut(document_path, document_data):
@staticmethod
def _make_write_w_document(document_path, **data):
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1._helpers import encode_dict
- from google.cloud.firestore_v1.proto import common_pb2
+ from google.cloud.firestore_v1.types import common
- return write_pb2.Write(
- update=document_pb2.Document(name=document_path, fields=encode_dict(data)),
- current_document=common_pb2.Precondition(exists=False),
+ return write.Write(
+ update=document.Document(name=document_path, fields=encode_dict(data)),
+ current_document=common.Precondition(exists=False),
)
@staticmethod
- def _make_write_w_transform(document_path, fields):
- from google.cloud.firestore_v1.proto import write_pb2
- from google.cloud.firestore_v1.gapic import enums
-
- server_val = enums.DocumentTransform.FieldTransform.ServerValue
- transforms = [
- write_pb2.DocumentTransform.FieldTransform(
- field_path=field, set_to_server_value=server_val.REQUEST_TIME
- )
- for field in fields
- ]
-
- return write_pb2.Write(
- transform=write_pb2.DocumentTransform(
- document=document_path, field_transforms=transforms
+ def _add_field_transforms(update_pb, fields):
+ from google.cloud.firestore_v1 import DocumentTransform
+
+ server_val = DocumentTransform.FieldTransform.ServerValue
+ for field in fields:
+ update_pb.update_transforms.append(
+ DocumentTransform.FieldTransform(
+ field_path=field, set_to_server_value=server_val.REQUEST_TIME
+ )
)
- )
def _helper(self, do_transform=False, empty_val=False):
from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
@@ -1557,9 +1594,7 @@ def _helper(self, do_transform=False, empty_val=False):
expected_pbs = [update_pb]
if do_transform:
- expected_pbs.append(
- self._make_write_w_transform(document_path, fields=["butter"])
- )
+ self._add_field_transforms(update_pb, fields=["butter"])
self.assertEqual(write_pbs, expected_pbs)
@@ -1582,32 +1617,25 @@ def _call_fut(document_path, document_data):
@staticmethod
def _make_write_w_document(document_path, **data):
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1._helpers import encode_dict
- return write_pb2.Write(
- update=document_pb2.Document(name=document_path, fields=encode_dict(data))
+ return write.Write(
+ update=document.Document(name=document_path, fields=encode_dict(data))
)
@staticmethod
- def _make_write_w_transform(document_path, fields):
- from google.cloud.firestore_v1.proto import write_pb2
- from google.cloud.firestore_v1.gapic import enums
-
- server_val = enums.DocumentTransform.FieldTransform.ServerValue
- transforms = [
- write_pb2.DocumentTransform.FieldTransform(
- field_path=field, set_to_server_value=server_val.REQUEST_TIME
- )
- for field in fields
- ]
-
- return write_pb2.Write(
- transform=write_pb2.DocumentTransform(
- document=document_path, field_transforms=transforms
+ def _add_field_transforms(update_pb, fields):
+ from google.cloud.firestore_v1 import DocumentTransform
+
+ server_val = DocumentTransform.FieldTransform.ServerValue
+ for field in fields:
+ update_pb.update_transforms.append(
+ DocumentTransform.FieldTransform(
+ field_path=field, set_to_server_value=server_val.REQUEST_TIME
+ )
)
- )
def test_w_empty_document(self):
document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
@@ -1628,8 +1656,8 @@ def test_w_only_server_timestamp(self):
write_pbs = self._call_fut(document_path, document_data)
update_pb = self._make_write_w_document(document_path)
- transform_pb = self._make_write_w_transform(document_path, ["butter"])
- expected_pbs = [update_pb, transform_pb]
+ self._add_field_transforms(update_pb, fields=["butter"])
+ expected_pbs = [update_pb]
self.assertEqual(write_pbs, expected_pbs)
def _helper(self, do_transform=False, empty_val=False):
@@ -1657,9 +1685,7 @@ def _helper(self, do_transform=False, empty_val=False):
expected_pbs = [update_pb]
if do_transform:
- expected_pbs.append(
- self._make_write_w_transform(document_path, fields=["butter"])
- )
+ self._add_field_transforms(update_pb, fields=["butter"])
self.assertEqual(write_pbs, expected_pbs)
@@ -1702,7 +1728,6 @@ def test_apply_merge_all_w_empty_document(self):
self.assertEqual(inst.data_merge, [])
self.assertEqual(inst.transform_merge, [])
self.assertEqual(inst.merge, [])
- self.assertFalse(inst.has_updates)
def test_apply_merge_all_w_delete(self):
from google.cloud.firestore_v1.transforms import DELETE_FIELD
@@ -1719,7 +1744,6 @@ def test_apply_merge_all_w_delete(self):
self.assertEqual(inst.data_merge, expected_data_merge)
self.assertEqual(inst.transform_merge, [])
self.assertEqual(inst.merge, expected_data_merge)
- self.assertTrue(inst.has_updates)
def test_apply_merge_all_w_server_timestamp(self):
from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
@@ -1735,7 +1759,6 @@ def test_apply_merge_all_w_server_timestamp(self):
self.assertEqual(inst.data_merge, expected_data_merge)
self.assertEqual(inst.transform_merge, expected_transform_merge)
self.assertEqual(inst.merge, expected_merge)
- self.assertTrue(inst.has_updates)
def test_apply_merge_list_fields_w_empty_document(self):
document_data = {}
@@ -1774,7 +1797,6 @@ def test_apply_merge_list_fields_w_delete(self):
expected_deleted_fields = [_make_field_path("delete_me")]
self.assertEqual(inst.set_fields, expected_set_fields)
self.assertEqual(inst.deleted_fields, expected_deleted_fields)
- self.assertTrue(inst.has_updates)
def test_apply_merge_list_fields_w_prefixes(self):
@@ -1801,7 +1823,6 @@ def test_apply_merge_list_fields_w_non_merge_field(self):
expected_set_fields = {"write_me": "value"}
self.assertEqual(inst.set_fields, expected_set_fields)
- self.assertTrue(inst.has_updates)
def test_apply_merge_list_fields_w_server_timestamp(self):
from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
@@ -1823,7 +1844,6 @@ def test_apply_merge_list_fields_w_server_timestamp(self):
self.assertEqual(inst.merge, expected_merge)
expected_server_timestamps = [_make_field_path("timestamp")]
self.assertEqual(inst.server_timestamps, expected_server_timestamps)
- self.assertTrue(inst.has_updates)
def test_apply_merge_list_fields_w_array_remove(self):
from google.cloud.firestore_v1.transforms import ArrayRemove
@@ -1846,7 +1866,6 @@ def test_apply_merge_list_fields_w_array_remove(self):
self.assertEqual(inst.merge, expected_merge)
expected_array_removes = {_make_field_path("remove_me"): values}
self.assertEqual(inst.array_removes, expected_array_removes)
- self.assertTrue(inst.has_updates)
def test_apply_merge_list_fields_w_array_union(self):
from google.cloud.firestore_v1.transforms import ArrayUnion
@@ -1869,7 +1888,6 @@ def test_apply_merge_list_fields_w_array_union(self):
self.assertEqual(inst.merge, expected_merge)
expected_array_unions = {_make_field_path("union_me"): values}
self.assertEqual(inst.array_unions, expected_array_unions)
- self.assertTrue(inst.has_updates)
class Test_pbs_for_set_with_merge(unittest.TestCase):
@@ -1883,39 +1901,32 @@ def _call_fut(document_path, document_data, merge):
@staticmethod
def _make_write_w_document(document_path, **data):
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1._helpers import encode_dict
- return write_pb2.Write(
- update=document_pb2.Document(name=document_path, fields=encode_dict(data))
+ return write.Write(
+ update=document.Document(name=document_path, fields=encode_dict(data))
)
@staticmethod
- def _make_write_w_transform(document_path, fields):
- from google.cloud.firestore_v1.proto import write_pb2
- from google.cloud.firestore_v1.gapic import enums
-
- server_val = enums.DocumentTransform.FieldTransform.ServerValue
- transforms = [
- write_pb2.DocumentTransform.FieldTransform(
- field_path=field, set_to_server_value=server_val.REQUEST_TIME
- )
- for field in fields
- ]
-
- return write_pb2.Write(
- transform=write_pb2.DocumentTransform(
- document=document_path, field_transforms=transforms
+ def _add_field_transforms(update_pb, fields):
+ from google.cloud.firestore_v1 import DocumentTransform
+
+ server_val = DocumentTransform.FieldTransform.ServerValue
+ for field in fields:
+ update_pb.update_transforms.append(
+ DocumentTransform.FieldTransform(
+ field_path=field, set_to_server_value=server_val.REQUEST_TIME
+ )
)
- )
@staticmethod
def _update_document_mask(update_pb, field_paths):
- from google.cloud.firestore_v1.proto import common_pb2
+ from google.cloud.firestore_v1.types import common
- update_pb.update_mask.CopyFrom(
- common_pb2.DocumentMask(field_paths=sorted(field_paths))
+ update_pb._pb.update_mask.CopyFrom(
+ common.DocumentMask(field_paths=sorted(field_paths))._pb
)
def test_with_merge_true_wo_transform(self):
@@ -1942,6 +1953,20 @@ def test_with_merge_field_wo_transform(self):
expected_pbs = [update_pb]
self.assertEqual(write_pbs, expected_pbs)
+ def test_with_merge_true_w_only_transform(self):
+ from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
+
+ document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
+ document_data = {"butter": SERVER_TIMESTAMP}
+
+ write_pbs = self._call_fut(document_path, document_data, merge=True)
+
+ update_pb = self._make_write_w_document(document_path)
+ self._update_document_mask(update_pb, field_paths=())
+ self._add_field_transforms(update_pb, fields=["butter"])
+ expected_pbs = [update_pb]
+ self.assertEqual(write_pbs, expected_pbs)
+
def test_with_merge_true_w_transform(self):
from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
@@ -1954,8 +1979,8 @@ def test_with_merge_true_w_transform(self):
update_pb = self._make_write_w_document(document_path, **update_data)
self._update_document_mask(update_pb, field_paths=sorted(update_data))
- transform_pb = self._make_write_w_transform(document_path, fields=["butter"])
- expected_pbs = [update_pb, transform_pb]
+ self._add_field_transforms(update_pb, fields=["butter"])
+ expected_pbs = [update_pb]
self.assertEqual(write_pbs, expected_pbs)
def test_with_merge_field_w_transform(self):
@@ -1974,8 +1999,8 @@ def test_with_merge_field_w_transform(self):
document_path, cheese=document_data["cheese"]
)
self._update_document_mask(update_pb, ["cheese"])
- transform_pb = self._make_write_w_transform(document_path, fields=["butter"])
- expected_pbs = [update_pb, transform_pb]
+ self._add_field_transforms(update_pb, fields=["butter"])
+ expected_pbs = [update_pb]
self.assertEqual(write_pbs, expected_pbs)
def test_with_merge_field_w_transform_masking_simple(self):
@@ -1989,10 +2014,9 @@ def test_with_merge_field_w_transform_masking_simple(self):
write_pbs = self._call_fut(document_path, document_data, merge=["butter.pecan"])
update_pb = self._make_write_w_document(document_path)
- transform_pb = self._make_write_w_transform(
- document_path, fields=["butter.pecan"]
- )
- expected_pbs = [update_pb, transform_pb]
+ self._update_document_mask(update_pb, field_paths=())
+ self._add_field_transforms(update_pb, fields=["butter.pecan"])
+ expected_pbs = [update_pb]
self.assertEqual(write_pbs, expected_pbs)
def test_with_merge_field_w_transform_parent(self):
@@ -2011,10 +2035,8 @@ def test_with_merge_field_w_transform_parent(self):
document_path, cheese=update_data["cheese"], butter={"popcorn": "yum"}
)
self._update_document_mask(update_pb, ["cheese", "butter"])
- transform_pb = self._make_write_w_transform(
- document_path, fields=["butter.pecan"]
- )
- expected_pbs = [update_pb, transform_pb]
+ self._add_field_transforms(update_pb, fields=["butter.pecan"])
+ expected_pbs = [update_pb]
self.assertEqual(write_pbs, expected_pbs)
@@ -2092,10 +2114,10 @@ def _helper(self, option=None, do_transform=False, **write_kwargs):
from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1.field_path import FieldPath
from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1 import DocumentTransform
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
document_path = _make_ref_string(u"toy", u"car", u"onion", u"garlic")
field_path1 = "bitez.yum"
@@ -2108,54 +2130,50 @@ def _helper(self, option=None, do_transform=False, **write_kwargs):
write_pbs = self._call_fut(document_path, field_updates, option)
- map_pb = document_pb2.MapValue(fields={"yum": _value_pb(bytes_value=value)})
+ map_pb = document.MapValue(fields={"yum": _value_pb(bytes_value=value)})
field_paths = [field_path1]
- expected_update_pb = write_pb2.Write(
- update=document_pb2.Document(
+ expected_update_pb = write.Write(
+ update=document.Document(
name=document_path, fields={"bitez": _value_pb(map_value=map_pb)}
),
- update_mask=common_pb2.DocumentMask(field_paths=field_paths),
+ update_mask=common.DocumentMask(field_paths=field_paths),
**write_kwargs
)
if isinstance(option, _helpers.ExistsOption):
- precondition = common_pb2.Precondition(exists=False)
- expected_update_pb.current_document.CopyFrom(precondition)
- expected_pbs = [expected_update_pb]
+ precondition = common.Precondition(exists=False)
+ expected_update_pb._pb.current_document.CopyFrom(precondition._pb)
+
if do_transform:
transform_paths = FieldPath.from_string(field_path2)
- server_val = enums.DocumentTransform.FieldTransform.ServerValue
- expected_transform_pb = write_pb2.Write(
- transform=write_pb2.DocumentTransform(
- document=document_path,
- field_transforms=[
- write_pb2.DocumentTransform.FieldTransform(
- field_path=transform_paths.to_api_repr(),
- set_to_server_value=server_val.REQUEST_TIME,
- )
- ],
+ server_val = DocumentTransform.FieldTransform.ServerValue
+ field_transform_pbs = [
+ write.DocumentTransform.FieldTransform(
+ field_path=transform_paths.to_api_repr(),
+ set_to_server_value=server_val.REQUEST_TIME,
)
- )
- expected_pbs.append(expected_transform_pb)
- self.assertEqual(write_pbs, expected_pbs)
+ ]
+ expected_update_pb.update_transforms.extend(field_transform_pbs)
+
+ self.assertEqual(write_pbs, [expected_update_pb])
def test_without_option(self):
- from google.cloud.firestore_v1.proto import common_pb2
+ from google.cloud.firestore_v1.types import common
- precondition = common_pb2.Precondition(exists=True)
+ precondition = common.Precondition(exists=True)
self._helper(current_document=precondition)
def test_with_exists_option(self):
- from google.cloud.firestore_v1.client import _helpers
+ from google.cloud.firestore_v1 import _helpers
option = _helpers.ExistsOption(False)
self._helper(option=option)
def test_update_and_transform(self):
- from google.cloud.firestore_v1.proto import common_pb2
+ from google.cloud.firestore_v1.types import common
- precondition = common_pb2.Precondition(exists=True)
+ precondition = common.Precondition(exists=True)
self._helper(current_document=precondition, do_transform=True)
@@ -2167,12 +2185,12 @@ def _call_fut(document_path, option):
return pb_for_delete(document_path, option)
def _helper(self, option=None, **write_kwargs):
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import write
document_path = _make_ref_string(u"chicken", u"philly", u"one", u"two")
write_pb = self._call_fut(document_path, option)
- expected_pb = write_pb2.Write(delete=document_path, **write_kwargs)
+ expected_pb = write.Write(delete=document_path, **write_kwargs)
self.assertEqual(write_pb, expected_pb)
def test_without_option(self):
@@ -2180,12 +2198,12 @@ def test_without_option(self):
def test_with_option(self):
from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1.proto import common_pb2
+ from google.cloud.firestore_v1.types import common
from google.cloud.firestore_v1 import _helpers
update_time = timestamp_pb2.Timestamp(seconds=1309700594, nanos=822211297)
option = _helpers.LastUpdateOption(update_time)
- precondition = common_pb2.Precondition(update_time=update_time)
+ precondition = common.Precondition(update_time=update_time)
self._helper(option=option, current_document=precondition)
@@ -2304,16 +2322,16 @@ def test___eq___same_timestamp(self):
def test_modify_write_update_time(self):
from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import write
timestamp_pb = timestamp_pb2.Timestamp(seconds=683893592, nanos=229362000)
option = self._make_one(timestamp_pb)
- write_pb = write_pb2.Write()
+ write_pb = write.Write()
ret_val = option.modify_write(write_pb)
self.assertIsNone(ret_val)
- expected_doc = common_pb2.Precondition(update_time=timestamp_pb)
+ expected_doc = common.Precondition(update_time=timestamp_pb)
self.assertEqual(write_pb.current_document, expected_doc)
@@ -2348,21 +2366,66 @@ def test___eq___same_exists(self):
self.assertTrue(option == other)
def test_modify_write(self):
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import write
for exists in (True, False):
option = self._make_one(exists)
- write_pb = write_pb2.Write()
+ write_pb = write.Write()
ret_val = option.modify_write(write_pb)
self.assertIsNone(ret_val)
- expected_doc = common_pb2.Precondition(exists=exists)
+ expected_doc = common.Precondition(exists=exists)
self.assertEqual(write_pb.current_document, expected_doc)
+class Test_make_retry_timeout_kwargs(unittest.TestCase):
+ @staticmethod
+ def _call_fut(retry, timeout):
+ from google.cloud.firestore_v1._helpers import make_retry_timeout_kwargs
+
+ return make_retry_timeout_kwargs(retry, timeout)
+
+ def test_default(self):
+ from google.api_core.gapic_v1.method import DEFAULT
+
+ kwargs = self._call_fut(DEFAULT, None)
+ expected = {}
+ self.assertEqual(kwargs, expected)
+
+ def test_retry_None(self):
+ kwargs = self._call_fut(None, None)
+ expected = {"retry": None}
+ self.assertEqual(kwargs, expected)
+
+ def test_retry_only(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ kwargs = self._call_fut(retry, None)
+ expected = {"retry": retry}
+ self.assertEqual(kwargs, expected)
+
+ def test_timeout_only(self):
+ from google.api_core.gapic_v1.method import DEFAULT
+
+ timeout = 123.0
+ kwargs = self._call_fut(DEFAULT, timeout)
+ expected = {"timeout": timeout}
+ self.assertEqual(kwargs, expected)
+
+ def test_retry_and_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ kwargs = self._call_fut(retry, timeout)
+ expected = {"retry": retry, "timeout": timeout}
+ self.assertEqual(kwargs, expected)
+
+
def _value_pb(**kwargs):
- from google.cloud.firestore_v1.proto.document_pb2 import Value
+ from google.cloud.firestore_v1.types.document import Value
return Value(**kwargs)
diff --git a/tests/unit/v1/test_async_batch.py b/tests/unit/v1/test_async_batch.py
new file mode 100644
index 0000000000..dce1cefdf7
--- /dev/null
+++ b/tests/unit/v1/test_async_batch.py
@@ -0,0 +1,174 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+import aiounittest
+
+import mock
+from tests.unit.v1.test__helpers import AsyncMock
+
+
+class TestAsyncWriteBatch(aiounittest.AsyncTestCase):
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.async_batch import AsyncWriteBatch
+
+ return AsyncWriteBatch
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def test_constructor(self):
+ batch = self._make_one(mock.sentinel.client)
+ self.assertIs(batch._client, mock.sentinel.client)
+ self.assertEqual(batch._write_pbs, [])
+ self.assertIsNone(batch.write_results)
+ self.assertIsNone(batch.commit_time)
+
+ async def _commit_helper(self, retry=None, timeout=None):
+ from google.protobuf import timestamp_pb2
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.types import firestore
+ from google.cloud.firestore_v1.types import write
+
+ # Create a minimal fake GAPIC with a dummy result.
+ firestore_api = AsyncMock(spec=["commit"])
+ timestamp = timestamp_pb2.Timestamp(seconds=1234567, nanos=123456798)
+ commit_response = firestore.CommitResponse(
+ write_results=[write.WriteResult(), write.WriteResult()],
+ commit_time=timestamp,
+ )
+ firestore_api.commit.return_value = commit_response
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client("grand")
+ client._firestore_api_internal = firestore_api
+
+ # Actually make a batch with some mutations and call commit().
+ batch = self._make_one(client)
+ document1 = client.document("a", "b")
+ batch.create(document1, {"ten": 10, "buck": "ets"})
+ document2 = client.document("c", "d", "e", "f")
+ batch.delete(document2)
+ write_pbs = batch._write_pbs[::]
+
+ write_results = await batch.commit(**kwargs)
+
+ self.assertEqual(write_results, list(commit_response.write_results))
+ self.assertEqual(batch.write_results, write_results)
+ self.assertEqual(batch.commit_time.timestamp_pb(), timestamp)
+ # Make sure batch has no more "changes".
+ self.assertEqual(batch._write_pbs, [])
+
+ # Verify the mocks.
+ firestore_api.commit.assert_called_once_with(
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_commit(self):
+ await self._commit_helper()
+
+ @pytest.mark.asyncio
+ async def test_commit_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+
+ await self._commit_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def test_as_context_mgr_wo_error(self):
+ from google.protobuf import timestamp_pb2
+ from google.cloud.firestore_v1.types import firestore
+ from google.cloud.firestore_v1.types import write
+
+ firestore_api = AsyncMock(spec=["commit"])
+ timestamp = timestamp_pb2.Timestamp(seconds=1234567, nanos=123456798)
+ commit_response = firestore.CommitResponse(
+ write_results=[write.WriteResult(), write.WriteResult()],
+ commit_time=timestamp,
+ )
+ firestore_api.commit.return_value = commit_response
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+ batch = self._make_one(client)
+ document1 = client.document("a", "b")
+ document2 = client.document("c", "d", "e", "f")
+
+ async with batch as ctx_mgr:
+ self.assertIs(ctx_mgr, batch)
+ ctx_mgr.create(document1, {"ten": 10, "buck": "ets"})
+ ctx_mgr.delete(document2)
+ write_pbs = batch._write_pbs[::]
+
+ self.assertEqual(batch.write_results, list(commit_response.write_results))
+ self.assertEqual(batch.commit_time.timestamp_pb(), timestamp)
+ # Make sure batch has no more "changes".
+ self.assertEqual(batch._write_pbs, [])
+
+ # Verify the mocks.
+ firestore_api.commit.assert_called_once_with(
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ )
+
+ @pytest.mark.asyncio
+ async def test_as_context_mgr_w_error(self):
+ firestore_api = AsyncMock(spec=["commit"])
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+ batch = self._make_one(client)
+ document1 = client.document("a", "b")
+ document2 = client.document("c", "d", "e", "f")
+
+ with self.assertRaises(RuntimeError):
+ async with batch as ctx_mgr:
+ ctx_mgr.create(document1, {"ten": 10, "buck": "ets"})
+ ctx_mgr.delete(document2)
+ raise RuntimeError("testing")
+
+ # batch still has its changes, as __aexit__ (and commit) is not invoked
+ # changes are preserved so commit can be retried
+ self.assertIsNone(batch.write_results)
+ self.assertIsNone(batch.commit_time)
+ self.assertEqual(len(batch._write_pbs), 2)
+
+ firestore_api.commit.assert_not_called()
+
+
+def _make_credentials():
+ import google.auth.credentials
+
+ return mock.Mock(spec=google.auth.credentials.Credentials)
+
+
+def _make_client(project="seventy-nine"):
+ from google.cloud.firestore_v1.client import Client
+
+ credentials = _make_credentials()
+ return Client(project=project, credentials=credentials)
diff --git a/tests/unit/v1/test_async_client.py b/tests/unit/v1/test_async_client.py
new file mode 100644
index 0000000000..44d81d0583
--- /dev/null
+++ b/tests/unit/v1/test_async_client.py
@@ -0,0 +1,427 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+import datetime
+import types
+import aiounittest
+
+import mock
+from tests.unit.v1.test__helpers import AsyncMock, AsyncIter
+
+
+class TestAsyncClient(aiounittest.AsyncTestCase):
+
+ PROJECT = "my-prahjekt"
+
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.async_client import AsyncClient
+
+ return AsyncClient
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def _make_default_one(self):
+ credentials = _make_credentials()
+ return self._make_one(project=self.PROJECT, credentials=credentials)
+
+ def test_constructor(self):
+ from google.cloud.firestore_v1.async_client import _CLIENT_INFO
+ from google.cloud.firestore_v1.async_client import DEFAULT_DATABASE
+
+ credentials = _make_credentials()
+ client = self._make_one(project=self.PROJECT, credentials=credentials)
+ self.assertEqual(client.project, self.PROJECT)
+ self.assertEqual(client._credentials, credentials)
+ self.assertEqual(client._database, DEFAULT_DATABASE)
+ self.assertIs(client._client_info, _CLIENT_INFO)
+ self.assertIsNone(client._emulator_host)
+
+ def test_constructor_with_emulator_host(self):
+ from google.cloud.firestore_v1.base_client import _FIRESTORE_EMULATOR_HOST
+
+ credentials = _make_credentials()
+ emulator_host = "localhost:8081"
+ with mock.patch("os.getenv") as getenv:
+ getenv.return_value = emulator_host
+ client = self._make_one(project=self.PROJECT, credentials=credentials)
+ self.assertEqual(client._emulator_host, emulator_host)
+ getenv.assert_called_once_with(_FIRESTORE_EMULATOR_HOST)
+
+ def test_constructor_explicit(self):
+ from google.api_core.client_options import ClientOptions
+
+ credentials = _make_credentials()
+ database = "now-db"
+ client_info = mock.Mock()
+ client_options = ClientOptions("endpoint")
+ client = self._make_one(
+ project=self.PROJECT,
+ credentials=credentials,
+ database=database,
+ client_info=client_info,
+ client_options=client_options,
+ )
+ self.assertEqual(client.project, self.PROJECT)
+ self.assertEqual(client._credentials, credentials)
+ self.assertEqual(client._database, database)
+ self.assertIs(client._client_info, client_info)
+ self.assertIs(client._client_options, client_options)
+
+ def test_constructor_w_client_options(self):
+ credentials = _make_credentials()
+ client = self._make_one(
+ project=self.PROJECT,
+ credentials=credentials,
+ client_options={"api_endpoint": "foo-firestore.googleapis.com"},
+ )
+ self.assertEqual(client._target, "foo-firestore.googleapis.com")
+
+ def test_collection_factory(self):
+ from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
+
+ collection_id = "users"
+ client = self._make_default_one()
+ collection = client.collection(collection_id)
+
+ self.assertEqual(collection._path, (collection_id,))
+ self.assertIs(collection._client, client)
+ self.assertIsInstance(collection, AsyncCollectionReference)
+
+ def test_collection_factory_nested(self):
+ from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
+
+ client = self._make_default_one()
+ parts = ("users", "alovelace", "beep")
+ collection_path = "/".join(parts)
+ collection1 = client.collection(collection_path)
+
+ self.assertEqual(collection1._path, parts)
+ self.assertIs(collection1._client, client)
+ self.assertIsInstance(collection1, AsyncCollectionReference)
+
+ # Make sure using segments gives the same result.
+ collection2 = client.collection(*parts)
+ self.assertEqual(collection2._path, parts)
+ self.assertIs(collection2._client, client)
+ self.assertIsInstance(collection2, AsyncCollectionReference)
+
+ def test__get_collection_reference(self):
+ from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
+
+ client = self._make_default_one()
+ collection = client._get_collection_reference("collectionId")
+
+ self.assertIs(collection._client, client)
+ self.assertIsInstance(collection, AsyncCollectionReference)
+
+ def test_collection_group(self):
+ client = self._make_default_one()
+ query = client.collection_group("collectionId").where("foo", "==", "bar")
+
+ self.assertTrue(query._all_descendants)
+ self.assertEqual(query._field_filters[0].field.field_path, "foo")
+ self.assertEqual(query._field_filters[0].value.string_value, "bar")
+ self.assertEqual(
+ query._field_filters[0].op, query._field_filters[0].Operator.EQUAL
+ )
+ self.assertEqual(query._parent.id, "collectionId")
+
+ def test_collection_group_no_slashes(self):
+ client = self._make_default_one()
+ with self.assertRaises(ValueError):
+ client.collection_group("foo/bar")
+
+ def test_document_factory(self):
+ from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+
+ parts = ("rooms", "roomA")
+ client = self._make_default_one()
+ doc_path = "/".join(parts)
+ document1 = client.document(doc_path)
+
+ self.assertEqual(document1._path, parts)
+ self.assertIs(document1._client, client)
+ self.assertIsInstance(document1, AsyncDocumentReference)
+
+ # Make sure using segments gives the same result.
+ document2 = client.document(*parts)
+ self.assertEqual(document2._path, parts)
+ self.assertIs(document2._client, client)
+ self.assertIsInstance(document2, AsyncDocumentReference)
+
+ def test_document_factory_w_absolute_path(self):
+ from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+
+ parts = ("rooms", "roomA")
+ client = self._make_default_one()
+ doc_path = "/".join(parts)
+ to_match = client.document(doc_path)
+ document1 = client.document(to_match._document_path)
+
+ self.assertEqual(document1._path, parts)
+ self.assertIs(document1._client, client)
+ self.assertIsInstance(document1, AsyncDocumentReference)
+
+ def test_document_factory_w_nested_path(self):
+ from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+
+ client = self._make_default_one()
+ parts = ("rooms", "roomA", "shoes", "dressy")
+ doc_path = "/".join(parts)
+ document1 = client.document(doc_path)
+
+ self.assertEqual(document1._path, parts)
+ self.assertIs(document1._client, client)
+ self.assertIsInstance(document1, AsyncDocumentReference)
+
+ # Make sure using segments gives the same result.
+ document2 = client.document(*parts)
+ self.assertEqual(document2._path, parts)
+ self.assertIs(document2._client, client)
+ self.assertIsInstance(document2, AsyncDocumentReference)
+
+ async def _collections_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
+ from google.cloud.firestore_v1 import _helpers
+
+ collection_ids = ["users", "projects"]
+
+ class Pager(object):
+ async def __aiter__(self, **_):
+ for collection_id in collection_ids:
+ yield collection_id
+
+ firestore_api = AsyncMock()
+ firestore_api.mock_add_spec(spec=["list_collection_ids"])
+ firestore_api.list_collection_ids.return_value = Pager()
+
+ client = self._make_default_one()
+ client._firestore_api_internal = firestore_api
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ collections = [c async for c in client.collections(**kwargs)]
+
+ self.assertEqual(len(collections), len(collection_ids))
+ for collection, collection_id in zip(collections, collection_ids):
+ self.assertIsInstance(collection, AsyncCollectionReference)
+ self.assertEqual(collection.parent, None)
+ self.assertEqual(collection.id, collection_id)
+
+ base_path = client._database_string + "/documents"
+ firestore_api.list_collection_ids.assert_called_once_with(
+ request={"parent": base_path}, metadata=client._rpc_metadata, **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_collections(self):
+ await self._collections_helper()
+
+ @pytest.mark.asyncio
+ async def test_collections_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._collections_helper(retry=retry, timeout=timeout)
+
+ async def _invoke_get_all(self, client, references, document_pbs, **kwargs):
+ # Create a minimal fake GAPIC with a dummy response.
+ firestore_api = AsyncMock(spec=["batch_get_documents"])
+ response_iterator = AsyncIter(document_pbs)
+ firestore_api.batch_get_documents.return_value = response_iterator
+
+ # Attach the fake GAPIC to a real client.
+ client._firestore_api_internal = firestore_api
+
+ # Actually call get_all().
+ snapshots = client.get_all(references, **kwargs)
+ self.assertIsInstance(snapshots, types.AsyncGeneratorType)
+
+ return [s async for s in snapshots]
+
+ async def _get_all_helper(
+ self, num_snapshots=2, txn_id=None, retry=None, timeout=None
+ ):
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.async_document import DocumentSnapshot
+
+ client = self._make_default_one()
+
+ data1 = {"a": "cheese"}
+ document1 = client.document("pineapple", "lamp1")
+ document_pb1, read_time = _doc_get_info(document1._document_path, data1)
+ response1 = _make_batch_response(found=document_pb1, read_time=read_time)
+
+ data2 = {"b": True, "c": 18}
+ document2 = client.document("pineapple", "lamp2")
+ document, read_time = _doc_get_info(document2._document_path, data2)
+ response2 = _make_batch_response(found=document, read_time=read_time)
+
+ document3 = client.document("pineapple", "lamp3")
+ response3 = _make_batch_response(missing=document3._document_path)
+
+ expected_data = [data1, data2, None][:num_snapshots]
+ documents = [document1, document2, document3][:num_snapshots]
+ responses = [response1, response2, response3][:num_snapshots]
+ field_paths = [
+ field_path for field_path in ["a", "b", None][:num_snapshots] if field_path
+ ]
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ if txn_id is not None:
+ transaction = client.transaction()
+ transaction._id = txn_id
+ kwargs["transaction"] = transaction
+
+ snapshots = await self._invoke_get_all(
+ client, documents, responses, field_paths=field_paths, **kwargs,
+ )
+
+ self.assertEqual(len(snapshots), num_snapshots)
+
+ for data, document, snapshot in zip(expected_data, documents, snapshots):
+ self.assertIsInstance(snapshot, DocumentSnapshot)
+ self.assertIs(snapshot._reference, document)
+ if data is None:
+ self.assertFalse(snapshot.exists)
+ else:
+ self.assertEqual(snapshot._data, data)
+
+ # Verify the call to the mock.
+ doc_paths = [document._document_path for document in documents]
+ mask = common.DocumentMask(field_paths=field_paths)
+
+ kwargs.pop("transaction", None)
+
+ client._firestore_api.batch_get_documents.assert_called_once_with(
+ request={
+ "database": client._database_string,
+ "documents": doc_paths,
+ "mask": mask,
+ "transaction": txn_id,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_get_all(self):
+ await self._get_all_helper()
+
+ @pytest.mark.asyncio
+ async def test_get_all_with_transaction(self):
+ txn_id = b"the-man-is-non-stop"
+ await self._get_all_helper(num_snapshots=1, txn_id=txn_id)
+
+ @pytest.mark.asyncio
+ async def test_get_all_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._get_all_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def test_get_all_wrong_order(self):
+ await self._get_all_helper(num_snapshots=3)
+
+ @pytest.mark.asyncio
+ async def test_get_all_unknown_result(self):
+ from google.cloud.firestore_v1.base_client import _BAD_DOC_TEMPLATE
+
+ client = self._make_default_one()
+
+ expected_document = client.document("pineapple", "lamp1")
+
+ data = {"z": 28.5}
+ wrong_document = client.document("pineapple", "lamp2")
+ document_pb, read_time = _doc_get_info(wrong_document._document_path, data)
+ response = _make_batch_response(found=document_pb, read_time=read_time)
+
+ # Exercise the mocked ``batch_get_documents``.
+ with self.assertRaises(ValueError) as exc_info:
+ await self._invoke_get_all(client, [expected_document], [response])
+
+ err_msg = _BAD_DOC_TEMPLATE.format(response.found.name)
+ self.assertEqual(exc_info.exception.args, (err_msg,))
+
+ # Verify the call to the mock.
+ doc_paths = [expected_document._document_path]
+ client._firestore_api.batch_get_documents.assert_called_once_with(
+ request={
+ "database": client._database_string,
+ "documents": doc_paths,
+ "mask": None,
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ )
+
+ def test_batch(self):
+ from google.cloud.firestore_v1.async_batch import AsyncWriteBatch
+
+ client = self._make_default_one()
+ batch = client.batch()
+ self.assertIsInstance(batch, AsyncWriteBatch)
+ self.assertIs(batch._client, client)
+ self.assertEqual(batch._write_pbs, [])
+
+ def test_transaction(self):
+ from google.cloud.firestore_v1.async_transaction import AsyncTransaction
+
+ client = self._make_default_one()
+ transaction = client.transaction(max_attempts=3, read_only=True)
+ self.assertIsInstance(transaction, AsyncTransaction)
+ self.assertEqual(transaction._write_pbs, [])
+ self.assertEqual(transaction._max_attempts, 3)
+ self.assertTrue(transaction._read_only)
+ self.assertIsNone(transaction._id)
+
+
+def _make_credentials():
+ import google.auth.credentials
+
+ return mock.Mock(spec=google.auth.credentials.Credentials)
+
+
+def _make_batch_response(**kwargs):
+ from google.cloud.firestore_v1.types import firestore
+
+ return firestore.BatchGetDocumentsResponse(**kwargs)
+
+
+def _doc_get_info(ref_string, values):
+ from google.cloud.firestore_v1.types import document
+ from google.cloud._helpers import _datetime_to_pb_timestamp
+ from google.cloud.firestore_v1 import _helpers
+
+ now = datetime.datetime.utcnow()
+ read_time = _datetime_to_pb_timestamp(now)
+ delta = datetime.timedelta(seconds=100)
+ update_time = _datetime_to_pb_timestamp(now - delta)
+ create_time = _datetime_to_pb_timestamp(now - 2 * delta)
+
+ document_pb = document.Document(
+ name=ref_string,
+ fields=_helpers.encode_dict(values),
+ create_time=create_time,
+ update_time=update_time,
+ )
+
+ return document_pb, read_time
diff --git a/tests/unit/v1/test_async_collection.py b/tests/unit/v1/test_async_collection.py
new file mode 100644
index 0000000000..4a2f30de10
--- /dev/null
+++ b/tests/unit/v1/test_async_collection.py
@@ -0,0 +1,382 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+import types
+import aiounittest
+
+import mock
+from tests.unit.v1.test__helpers import AsyncMock, AsyncIter
+
+
+class TestAsyncCollectionReference(aiounittest.AsyncTestCase):
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
+
+ return AsyncCollectionReference
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ @staticmethod
+ def _get_public_methods(klass):
+ return set().union(
+ *(
+ (
+ name
+ for name, value in class_.__dict__.items()
+ if (
+ not name.startswith("_")
+ and isinstance(value, types.FunctionType)
+ )
+ )
+ for class_ in (klass,) + klass.__bases__
+ )
+ )
+
+ def test_query_method_matching(self):
+ from google.cloud.firestore_v1.async_query import AsyncQuery
+
+ query_methods = self._get_public_methods(AsyncQuery)
+ klass = self._get_target_class()
+ collection_methods = self._get_public_methods(klass)
+ # Make sure every query method is present on
+ # ``AsyncCollectionReference``.
+ self.assertLessEqual(query_methods, collection_methods)
+
+ def test_constructor(self):
+ collection_id1 = "rooms"
+ document_id = "roomA"
+ collection_id2 = "messages"
+ client = mock.sentinel.client
+
+ collection = self._make_one(
+ collection_id1, document_id, collection_id2, client=client
+ )
+ self.assertIs(collection._client, client)
+ expected_path = (collection_id1, document_id, collection_id2)
+ self.assertEqual(collection._path, expected_path)
+
+ @pytest.mark.asyncio
+ async def test_add_auto_assigned(self):
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+ from google.cloud.firestore_v1 import SERVER_TIMESTAMP
+ from google.cloud.firestore_v1._helpers import pbs_for_create
+
+        # Create a minimal fake GAPIC and attach it to a real client.
+ firestore_api = AsyncMock(spec=["create_document", "commit"])
+ write_result = mock.Mock(
+ update_time=mock.sentinel.update_time, spec=["update_time"]
+ )
+ commit_response = mock.Mock(
+ write_results=[write_result],
+ spec=["write_results", "commit_time"],
+ commit_time=mock.sentinel.commit_time,
+ )
+ firestore_api.commit.return_value = commit_response
+ create_doc_response = document.Document()
+ firestore_api.create_document.return_value = create_doc_response
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Actually make a collection.
+ collection = self._make_one("grand-parent", "parent", "child", client=client)
+
+ # Actually call add() on our collection; include a transform to make
+ # sure transforms during adds work.
+ document_data = {"been": "here", "now": SERVER_TIMESTAMP}
+
+ patch = mock.patch("google.cloud.firestore_v1.base_collection._auto_id")
+ random_doc_id = "DEADBEEF"
+ with patch as patched:
+ patched.return_value = random_doc_id
+ update_time, document_ref = await collection.add(document_data)
+
+ # Verify the response and the mocks.
+ self.assertIs(update_time, mock.sentinel.update_time)
+ self.assertIsInstance(document_ref, AsyncDocumentReference)
+ self.assertIs(document_ref._client, client)
+ expected_path = collection._path + (random_doc_id,)
+ self.assertEqual(document_ref._path, expected_path)
+
+ write_pbs = pbs_for_create(document_ref._document_path, document_data)
+ firestore_api.commit.assert_called_once_with(
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ )
+ # Since we generate the ID locally, we don't call 'create_document'.
+ firestore_api.create_document.assert_not_called()
+
+ @staticmethod
+ def _write_pb_for_create(document_path, document_data):
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
+ from google.cloud.firestore_v1 import _helpers
+
+ return write.Write(
+ update=document.Document(
+ name=document_path, fields=_helpers.encode_dict(document_data)
+ ),
+ current_document=common.Precondition(exists=False),
+ )
+
+ async def _add_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+ from google.cloud.firestore_v1 import _helpers
+
+ # Create a minimal fake GAPIC with a dummy response.
+ firestore_api = AsyncMock(spec=["commit"])
+ write_result = mock.Mock(
+ update_time=mock.sentinel.update_time, spec=["update_time"]
+ )
+ commit_response = mock.Mock(
+ write_results=[write_result],
+ spec=["write_results", "commit_time"],
+ commit_time=mock.sentinel.commit_time,
+ )
+ firestore_api.commit.return_value = commit_response
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Actually make a collection and call add().
+ collection = self._make_one("parent", client=client)
+ document_data = {"zorp": 208.75, "i-did-not": b"know that"}
+ doc_id = "child"
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ update_time, document_ref = await collection.add(
+ document_data, document_id=doc_id, **kwargs,
+ )
+
+ # Verify the response and the mocks.
+ self.assertIs(update_time, mock.sentinel.update_time)
+ self.assertIsInstance(document_ref, AsyncDocumentReference)
+ self.assertIs(document_ref._client, client)
+ self.assertEqual(document_ref._path, (collection.id, doc_id))
+
+ write_pb = self._write_pb_for_create(document_ref._document_path, document_data)
+ firestore_api.commit.assert_called_once_with(
+ request={
+ "database": client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_add_explicit_id(self):
+ await self._add_helper()
+
+ @pytest.mark.asyncio
+ async def test_add_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._add_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def _list_documents_helper(self, page_size=None, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
+ from google.api_core.page_iterator_async import AsyncIterator
+ from google.api_core.page_iterator import Page
+ from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+ from google.cloud.firestore_v1.types.document import Document
+
+ class _AsyncIterator(AsyncIterator):
+ def __init__(self, pages):
+ super(_AsyncIterator, self).__init__(client=None)
+ self._pages = pages
+
+ async def _next_page(self):
+ if self._pages:
+ page, self._pages = self._pages[0], self._pages[1:]
+ return Page(self, page, self.item_to_value)
+
+ client = _make_client()
+ template = client._database_string + "/documents/{}"
+ document_ids = ["doc-1", "doc-2"]
+ documents = [
+ Document(name=template.format(document_id)) for document_id in document_ids
+ ]
+ iterator = _AsyncIterator(pages=[documents])
+ firestore_api = AsyncMock()
+ firestore_api.mock_add_spec(spec=["list_documents"])
+ firestore_api.list_documents.return_value = iterator
+ client._firestore_api_internal = firestore_api
+ collection = self._make_one("collection", client=client)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ if page_size is not None:
+ documents = [
+ i
+ async for i in collection.list_documents(page_size=page_size, **kwargs,)
+ ]
+ else:
+ documents = [i async for i in collection.list_documents(**kwargs)]
+
+ # Verify the response and the mocks.
+ self.assertEqual(len(documents), len(document_ids))
+ for document, document_id in zip(documents, document_ids):
+ self.assertIsInstance(document, AsyncDocumentReference)
+ self.assertEqual(document.parent, collection)
+ self.assertEqual(document.id, document_id)
+
+ parent, _ = collection._parent_info()
+ firestore_api.list_documents.assert_called_once_with(
+ request={
+ "parent": parent,
+ "collection_id": collection.id,
+ "page_size": page_size,
+ "show_missing": True,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_list_documents_wo_page_size(self):
+ await self._list_documents_helper()
+
+ @pytest.mark.asyncio
+ async def test_list_documents_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._list_documents_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def test_list_documents_w_page_size(self):
+ await self._list_documents_helper(page_size=25)
+
+ @mock.patch("google.cloud.firestore_v1.async_query.AsyncQuery", autospec=True)
+ @pytest.mark.asyncio
+ async def test_get(self, query_class):
+ collection = self._make_one("collection")
+ get_response = await collection.get()
+
+ query_class.assert_called_once_with(collection)
+ query_instance = query_class.return_value
+
+ self.assertIs(get_response, query_instance.get.return_value)
+ query_instance.get.assert_called_once_with(transaction=None)
+
+ @mock.patch("google.cloud.firestore_v1.async_query.AsyncQuery", autospec=True)
+ @pytest.mark.asyncio
+ async def test_get_w_retry_timeout(self, query_class):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ collection = self._make_one("collection")
+ get_response = await collection.get(retry=retry, timeout=timeout)
+
+ query_class.assert_called_once_with(collection)
+ query_instance = query_class.return_value
+
+ self.assertIs(get_response, query_instance.get.return_value)
+ query_instance.get.assert_called_once_with(
+ transaction=None, retry=retry, timeout=timeout,
+ )
+
+ @mock.patch("google.cloud.firestore_v1.async_query.AsyncQuery", autospec=True)
+ @pytest.mark.asyncio
+ async def test_get_with_transaction(self, query_class):
+ collection = self._make_one("collection")
+ transaction = mock.sentinel.txn
+ get_response = await collection.get(transaction=transaction)
+
+ query_class.assert_called_once_with(collection)
+ query_instance = query_class.return_value
+
+ self.assertIs(get_response, query_instance.get.return_value)
+ query_instance.get.assert_called_once_with(transaction=transaction)
+
+ @mock.patch("google.cloud.firestore_v1.async_query.AsyncQuery", autospec=True)
+ @pytest.mark.asyncio
+ async def test_stream(self, query_class):
+ query_class.return_value.stream.return_value = AsyncIter(range(3))
+
+ collection = self._make_one("collection")
+ stream_response = collection.stream()
+
+ async for _ in stream_response:
+ pass
+
+ query_class.assert_called_once_with(collection)
+ query_instance = query_class.return_value
+ query_instance.stream.assert_called_once_with(transaction=None)
+
+ @mock.patch("google.cloud.firestore_v1.async_query.AsyncQuery", autospec=True)
+ @pytest.mark.asyncio
+ async def test_stream_w_retry_timeout(self, query_class):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ query_class.return_value.stream.return_value = AsyncIter(range(3))
+
+ collection = self._make_one("collection")
+ stream_response = collection.stream(retry=retry, timeout=timeout)
+
+ async for _ in stream_response:
+ pass
+
+ query_class.assert_called_once_with(collection)
+ query_instance = query_class.return_value
+ query_instance.stream.assert_called_once_with(
+ transaction=None, retry=retry, timeout=timeout,
+ )
+
+ @mock.patch("google.cloud.firestore_v1.async_query.AsyncQuery", autospec=True)
+ @pytest.mark.asyncio
+ async def test_stream_with_transaction(self, query_class):
+ query_class.return_value.stream.return_value = AsyncIter(range(3))
+
+ collection = self._make_one("collection")
+ transaction = mock.sentinel.txn
+ stream_response = collection.stream(transaction=transaction)
+
+ async for _ in stream_response:
+ pass
+
+ query_class.assert_called_once_with(collection)
+ query_instance = query_class.return_value
+ query_instance.stream.assert_called_once_with(transaction=transaction)
+
+
+def _make_credentials():
+ import google.auth.credentials
+
+ return mock.Mock(spec=google.auth.credentials.Credentials)
+
+
+def _make_client():
+ from google.cloud.firestore_v1.async_client import AsyncClient
+
+ credentials = _make_credentials()
+ return AsyncClient(project="project-project", credentials=credentials)
diff --git a/tests/unit/v1/test_async_document.py b/tests/unit/v1/test_async_document.py
new file mode 100644
index 0000000000..606652646e
--- /dev/null
+++ b/tests/unit/v1/test_async_document.py
@@ -0,0 +1,566 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+import collections
+import aiounittest
+
+import mock
+from tests.unit.v1.test__helpers import AsyncMock
+
+
+class TestAsyncDocumentReference(aiounittest.AsyncTestCase):
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+
+ return AsyncDocumentReference
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def test_constructor(self):
+ collection_id1 = "users"
+ document_id1 = "alovelace"
+ collection_id2 = "platform"
+ document_id2 = "*nix"
+ client = mock.MagicMock()
+ client.__hash__.return_value = 1234
+
+ document = self._make_one(
+ collection_id1, document_id1, collection_id2, document_id2, client=client
+ )
+ self.assertIs(document._client, client)
+ expected_path = "/".join(
+ (collection_id1, document_id1, collection_id2, document_id2)
+ )
+ self.assertEqual(document.path, expected_path)
+
+ @staticmethod
+ def _make_commit_repsonse(write_results=None):
+ from google.cloud.firestore_v1.types import firestore
+
+ response = mock.create_autospec(firestore.CommitResponse)
+ response.write_results = write_results or [mock.sentinel.write_result]
+ response.commit_time = mock.sentinel.commit_time
+ return response
+
+ @staticmethod
+ def _write_pb_for_create(document_path, document_data):
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
+ from google.cloud.firestore_v1 import _helpers
+
+ return write.Write(
+ update=document.Document(
+ name=document_path, fields=_helpers.encode_dict(document_data)
+ ),
+ current_document=common.Precondition(exists=False),
+ )
+
+ async def _create_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
+
+ # Create a minimal fake GAPIC with a dummy response.
+ firestore_api = AsyncMock()
+ firestore_api.commit.mock_add_spec(spec=["commit"])
+ firestore_api.commit.return_value = self._make_commit_repsonse()
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client("dignity")
+ client._firestore_api_internal = firestore_api
+
+ # Actually make a document and call create().
+ document = self._make_one("foo", "twelve", client=client)
+ document_data = {"hello": "goodbye", "count": 99}
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ write_result = await document.create(document_data, **kwargs)
+
+ # Verify the response and the mocks.
+ self.assertIs(write_result, mock.sentinel.write_result)
+ write_pb = self._write_pb_for_create(document._document_path, document_data)
+ firestore_api.commit.assert_called_once_with(
+ request={
+ "database": client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_create(self):
+ await self._create_helper()
+
+ @pytest.mark.asyncio
+ async def test_create_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._create_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def test_create_empty(self):
+ # Create a minimal fake GAPIC with a dummy response.
+ from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+ from google.cloud.firestore_v1.async_document import DocumentSnapshot
+
+ firestore_api = AsyncMock(spec=["commit"])
+ document_reference = mock.create_autospec(AsyncDocumentReference)
+ snapshot = mock.create_autospec(DocumentSnapshot)
+ snapshot.exists = True
+ document_reference.get.return_value = snapshot
+ firestore_api.commit.return_value = self._make_commit_repsonse(
+ write_results=[document_reference]
+ )
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client("dignity")
+ client._firestore_api_internal = firestore_api
+ client.get_all = mock.MagicMock()
+ client.get_all.exists.return_value = True
+
+ # Actually make a document and call create().
+ document = self._make_one("foo", "twelve", client=client)
+ document_data = {}
+ write_result = await document.create(document_data)
+ self.assertTrue((await write_result.get()).exists)
+
+ @staticmethod
+ def _write_pb_for_set(document_path, document_data, merge):
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
+ from google.cloud.firestore_v1 import _helpers
+
+ write_pbs = write.Write(
+ update=document.Document(
+ name=document_path, fields=_helpers.encode_dict(document_data)
+ )
+ )
+ if merge:
+ field_paths = [
+ field_path
+ for field_path, value in _helpers.extract_fields(
+ document_data, _helpers.FieldPath()
+ )
+ ]
+ field_paths = [
+ field_path.to_api_repr() for field_path in sorted(field_paths)
+ ]
+ mask = common.DocumentMask(field_paths=sorted(field_paths))
+ write_pbs._pb.update_mask.CopyFrom(mask._pb)
+ return write_pbs
+
+ @pytest.mark.asyncio
+ async def _set_helper(self, merge=False, retry=None, timeout=None, **option_kwargs):
+ from google.cloud.firestore_v1 import _helpers
+
+ # Create a minimal fake GAPIC with a dummy response.
+ firestore_api = AsyncMock(spec=["commit"])
+ firestore_api.commit.return_value = self._make_commit_repsonse()
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client("db-dee-bee")
+ client._firestore_api_internal = firestore_api
+
+        # Actually make a document and call set().
+ document = self._make_one("User", "Interface", client=client)
+ document_data = {"And": 500, "Now": b"\xba\xaa\xaa \xba\xaa\xaa"}
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ write_result = await document.set(document_data, merge, **kwargs)
+
+ # Verify the response and the mocks.
+ self.assertIs(write_result, mock.sentinel.write_result)
+ write_pb = self._write_pb_for_set(document._document_path, document_data, merge)
+
+ firestore_api.commit.assert_called_once_with(
+ request={
+ "database": client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_set(self):
+ await self._set_helper()
+
+ @pytest.mark.asyncio
+ async def test_set_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._set_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def test_set_merge(self):
+ await self._set_helper(merge=True)
+
+ @staticmethod
+ def _write_pb_for_update(document_path, update_values, field_paths):
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
+ from google.cloud.firestore_v1 import _helpers
+
+ return write.Write(
+ update=document.Document(
+ name=document_path, fields=_helpers.encode_dict(update_values)
+ ),
+ update_mask=common.DocumentMask(field_paths=field_paths),
+ current_document=common.Precondition(exists=True),
+ )
+
+ @pytest.mark.asyncio
+ async def _update_helper(self, retry=None, timeout=None, **option_kwargs):
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.transforms import DELETE_FIELD
+
+ # Create a minimal fake GAPIC with a dummy response.
+ firestore_api = AsyncMock(spec=["commit"])
+ firestore_api.commit.return_value = self._make_commit_repsonse()
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client("potato-chip")
+ client._firestore_api_internal = firestore_api
+
+        # Actually make a document and call update().
+        document = self._make_one("baked", "Alaska", client=client)
+        # "Cheat" and use OrderedDict-s so that items() is deterministic.
+ field_updates = collections.OrderedDict(
+ (("hello", 1), ("then.do", False), ("goodbye", DELETE_FIELD))
+ )
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ if option_kwargs:
+ option = client.write_option(**option_kwargs)
+ write_result = await document.update(field_updates, option=option, **kwargs)
+ else:
+ option = None
+ write_result = await document.update(field_updates, **kwargs)
+
+ # Verify the response and the mocks.
+ self.assertIs(write_result, mock.sentinel.write_result)
+ update_values = {
+ "hello": field_updates["hello"],
+ "then": {"do": field_updates["then.do"]},
+ }
+ field_paths = list(field_updates.keys())
+ write_pb = self._write_pb_for_update(
+ document._document_path, update_values, sorted(field_paths)
+ )
+ if option is not None:
+ option.modify_write(write_pb)
+ firestore_api.commit.assert_called_once_with(
+ request={
+ "database": client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_update_with_exists(self):
+ with self.assertRaises(ValueError):
+ await self._update_helper(exists=True)
+
+ @pytest.mark.asyncio
+ async def test_update(self):
+ await self._update_helper()
+
+ @pytest.mark.asyncio
+ async def test_update_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._update_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def test_update_with_precondition(self):
+ from google.protobuf import timestamp_pb2
+
+ timestamp = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244)
+ await self._update_helper(last_update_time=timestamp)
+
+ @pytest.mark.asyncio
+ async def test_empty_update(self):
+ # Create a minimal fake GAPIC with a dummy response.
+ firestore_api = AsyncMock(spec=["commit"])
+ firestore_api.commit.return_value = self._make_commit_repsonse()
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client("potato-chip")
+ client._firestore_api_internal = firestore_api
+
+        # Actually make a document and call update().
+        document = self._make_one("baked", "Alaska", client=client)
+        # An empty update is invalid and should raise ValueError.
+ field_updates = {}
+ with self.assertRaises(ValueError):
+ await document.update(field_updates)
+
+ @pytest.mark.asyncio
+ async def _delete_helper(self, retry=None, timeout=None, **option_kwargs):
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.types import write
+
+ # Create a minimal fake GAPIC with a dummy response.
+ firestore_api = AsyncMock(spec=["commit"])
+ firestore_api.commit.return_value = self._make_commit_repsonse()
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client("donut-base")
+ client._firestore_api_internal = firestore_api
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ # Actually make a document and call delete().
+ document = self._make_one("where", "we-are", client=client)
+ if option_kwargs:
+ option = client.write_option(**option_kwargs)
+ delete_time = await document.delete(option=option, **kwargs)
+ else:
+ option = None
+ delete_time = await document.delete(**kwargs)
+
+ # Verify the response and the mocks.
+ self.assertIs(delete_time, mock.sentinel.commit_time)
+ write_pb = write.Write(delete=document._document_path)
+ if option is not None:
+ option.modify_write(write_pb)
+ firestore_api.commit.assert_called_once_with(
+ request={
+ "database": client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_delete(self):
+ await self._delete_helper()
+
+ @pytest.mark.asyncio
+ async def test_delete_with_option(self):
+ from google.protobuf import timestamp_pb2
+
+ timestamp_pb = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244)
+ await self._delete_helper(last_update_time=timestamp_pb)
+
+ @pytest.mark.asyncio
+ async def test_delete_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._delete_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def _get_helper(
+ self,
+ field_paths=None,
+ use_transaction=False,
+ not_found=False,
+ retry=None,
+ timeout=None,
+ ):
+ from google.api_core.exceptions import NotFound
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.transaction import Transaction
+
+ # Create a minimal fake GAPIC with a dummy response.
+ create_time = 123
+ update_time = 234
+ firestore_api = AsyncMock(spec=["get_document"])
+ response = mock.create_autospec(document.Document)
+ response.fields = {}
+ response.create_time = create_time
+ response.update_time = update_time
+
+ if not_found:
+ firestore_api.get_document.side_effect = NotFound("testing")
+ else:
+ firestore_api.get_document.return_value = response
+
+ client = _make_client("donut-base")
+ client._firestore_api_internal = firestore_api
+
+ document = self._make_one("where", "we-are", client=client)
+
+ if use_transaction:
+ transaction = Transaction(client)
+ transaction_id = transaction._id = b"asking-me-2"
+ else:
+ transaction = None
+
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ snapshot = await document.get(
+ field_paths=field_paths, transaction=transaction, **kwargs,
+ )
+
+ self.assertIs(snapshot.reference, document)
+ if not_found:
+ self.assertIsNone(snapshot._data)
+ self.assertFalse(snapshot.exists)
+ self.assertIsNone(snapshot.read_time)
+ self.assertIsNone(snapshot.create_time)
+ self.assertIsNone(snapshot.update_time)
+ else:
+ self.assertEqual(snapshot.to_dict(), {})
+ self.assertTrue(snapshot.exists)
+ self.assertIsNone(snapshot.read_time)
+ self.assertIs(snapshot.create_time, create_time)
+ self.assertIs(snapshot.update_time, update_time)
+
+ # Verify the request made to the API
+ if field_paths is not None:
+ mask = common.DocumentMask(field_paths=sorted(field_paths))
+ else:
+ mask = None
+
+ if use_transaction:
+ expected_transaction_id = transaction_id
+ else:
+ expected_transaction_id = None
+
+ firestore_api.get_document.assert_called_once_with(
+ request={
+ "name": document._document_path,
+ "mask": mask,
+ "transaction": expected_transaction_id,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_get_not_found(self):
+ await self._get_helper(not_found=True)
+
+ @pytest.mark.asyncio
+ async def test_get_default(self):
+ await self._get_helper()
+
+ @pytest.mark.asyncio
+ async def test_get_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._get_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def test_get_w_string_field_path(self):
+ with self.assertRaises(ValueError):
+ await self._get_helper(field_paths="foo")
+
+ @pytest.mark.asyncio
+ async def test_get_with_field_path(self):
+ await self._get_helper(field_paths=["foo"])
+
+ @pytest.mark.asyncio
+ async def test_get_with_multiple_field_paths(self):
+ await self._get_helper(field_paths=["foo", "bar.baz"])
+
+ @pytest.mark.asyncio
+ async def test_get_with_transaction(self):
+ await self._get_helper(use_transaction=True)
+
+ @pytest.mark.asyncio
+ async def _collections_helper(self, page_size=None, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
+
+ collection_ids = ["coll-1", "coll-2"]
+
+ class Pager(object):
+ async def __aiter__(self, **_):
+ for collection_id in collection_ids:
+ yield collection_id
+
+ firestore_api = AsyncMock()
+ firestore_api.mock_add_spec(spec=["list_collection_ids"])
+ firestore_api.list_collection_ids.return_value = Pager()
+
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+        # Actually make a document and call collections().
+ document = self._make_one("where", "we-are", client=client)
+ if page_size is not None:
+ collections = [
+ c async for c in document.collections(page_size=page_size, **kwargs)
+ ]
+ else:
+ collections = [c async for c in document.collections(**kwargs)]
+
+ # Verify the response and the mocks.
+ self.assertEqual(len(collections), len(collection_ids))
+ for collection, collection_id in zip(collections, collection_ids):
+ self.assertIsInstance(collection, AsyncCollectionReference)
+ self.assertEqual(collection.parent, document)
+ self.assertEqual(collection.id, collection_id)
+
+ firestore_api.list_collection_ids.assert_called_once_with(
+ request={"parent": document._document_path, "page_size": page_size},
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_collections(self):
+ await self._collections_helper()
+
+ @pytest.mark.asyncio
+ async def test_collections_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._collections_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def test_collections_w_page_size(self):
+ await self._collections_helper(page_size=10)
+
+
+def _make_credentials():
+ import google.auth.credentials
+
+ return mock.Mock(spec=google.auth.credentials.Credentials)
+
+
+def _make_client(project="project-project"):
+ from google.cloud.firestore_v1.async_client import AsyncClient
+
+ credentials = _make_credentials()
+ return AsyncClient(project=project, credentials=credentials)
diff --git a/tests/unit/v1/test_async_query.py b/tests/unit/v1/test_async_query.py
new file mode 100644
index 0000000000..42514c798e
--- /dev/null
+++ b/tests/unit/v1/test_async_query.py
@@ -0,0 +1,604 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+import types
+import aiounittest
+
+import mock
+from tests.unit.v1.test__helpers import AsyncMock, AsyncIter
+from tests.unit.v1.test_base_query import (
+ _make_credentials,
+ _make_query_response,
+ _make_cursor_pb,
+)
+
+
+class TestAsyncQuery(aiounittest.AsyncTestCase):
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.async_query import AsyncQuery
+
+ return AsyncQuery
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def test_constructor(self):
+ query = self._make_one(mock.sentinel.parent)
+ self.assertIs(query._parent, mock.sentinel.parent)
+ self.assertIsNone(query._projection)
+ self.assertEqual(query._field_filters, ())
+ self.assertEqual(query._orders, ())
+ self.assertIsNone(query._limit)
+ self.assertIsNone(query._offset)
+ self.assertIsNone(query._start_at)
+ self.assertIsNone(query._end_at)
+ self.assertFalse(query._all_descendants)
+
+ async def _get_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
+
+ # Create a minimal fake GAPIC.
+ firestore_api = AsyncMock(spec=["run_query"])
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Make a **real** collection reference as parent.
+ parent = client.collection("dee")
+
+ # Add a dummy response to the minimal fake GAPIC.
+ _, expected_prefix = parent._parent_info()
+ name = "{}/sleep".format(expected_prefix)
+ data = {"snooze": 10}
+
+ response_pb = _make_query_response(name=name, data=data)
+ firestore_api.run_query.return_value = AsyncIter([response_pb])
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+ returned = await query.get(**kwargs)
+
+ self.assertIsInstance(returned, list)
+ self.assertEqual(len(returned), 1)
+
+ snapshot = returned[0]
+ self.assertEqual(snapshot.reference._path, ("dee", "sleep"))
+ self.assertEqual(snapshot.to_dict(), data)
+
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ firestore_api.run_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_get(self):
+ await self._get_helper()
+
+ @pytest.mark.asyncio
+ async def test_get_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._get_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def test_get_limit_to_last(self):
+ from google.cloud import firestore
+ from google.cloud.firestore_v1.base_query import _enum_from_direction
+
+ # Create a minimal fake GAPIC.
+ firestore_api = AsyncMock(spec=["run_query"])
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Make a **real** collection reference as parent.
+ parent = client.collection("dee")
+
+ # Add a dummy response to the minimal fake GAPIC.
+ _, expected_prefix = parent._parent_info()
+ name = "{}/sleep".format(expected_prefix)
+ data = {"snooze": 10}
+ data2 = {"snooze": 20}
+
+ response_pb = _make_query_response(name=name, data=data)
+ response_pb2 = _make_query_response(name=name, data=data2)
+
+ firestore_api.run_query.return_value = AsyncIter([response_pb2, response_pb])
+
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+ query = query.order_by(
+ "snooze", direction=firestore.AsyncQuery.DESCENDING
+ ).limit_to_last(2)
+ returned = await query.get()
+
+ self.assertIsInstance(returned, list)
+ self.assertEqual(
+ query._orders[0].direction,
+ _enum_from_direction(firestore.AsyncQuery.ASCENDING),
+ )
+ self.assertEqual(len(returned), 2)
+
+ snapshot = returned[0]
+ self.assertEqual(snapshot.reference._path, ("dee", "sleep"))
+ self.assertEqual(snapshot.to_dict(), data)
+
+ snapshot2 = returned[1]
+ self.assertEqual(snapshot2.reference._path, ("dee", "sleep"))
+ self.assertEqual(snapshot2.to_dict(), data2)
+
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ firestore_api.run_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ )
+
+ async def _stream_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
+
+ # Create a minimal fake GAPIC.
+ firestore_api = AsyncMock(spec=["run_query"])
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Make a **real** collection reference as parent.
+ parent = client.collection("dee")
+
+ # Add a dummy response to the minimal fake GAPIC.
+ _, expected_prefix = parent._parent_info()
+ name = "{}/sleep".format(expected_prefix)
+ data = {"snooze": 10}
+ response_pb = _make_query_response(name=name, data=data)
+ firestore_api.run_query.return_value = AsyncIter([response_pb])
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+
+ get_response = query.stream(**kwargs)
+
+ self.assertIsInstance(get_response, types.AsyncGeneratorType)
+ returned = [x async for x in get_response]
+ self.assertEqual(len(returned), 1)
+ snapshot = returned[0]
+ self.assertEqual(snapshot.reference._path, ("dee", "sleep"))
+ self.assertEqual(snapshot.to_dict(), data)
+
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ firestore_api.run_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_stream_simple(self):
+ await self._stream_helper()
+
+ @pytest.mark.asyncio
+ async def test_stream_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._stream_helper(retry=retry, timeout=timeout)
+
+ @pytest.mark.asyncio
+ async def test_stream_with_limit_to_last(self):
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ # Make a **real** collection reference as parent.
+ parent = client.collection("dee")
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+ query = query.limit_to_last(2)
+
+ stream_response = query.stream()
+
+ with self.assertRaises(ValueError):
+ [d async for d in stream_response]
+
+ @pytest.mark.asyncio
+ async def test_stream_with_transaction(self):
+ # Create a minimal fake GAPIC.
+ firestore_api = AsyncMock(spec=["run_query"])
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Create a real-ish transaction for this client.
+ transaction = client.transaction()
+ txn_id = b"\x00\x00\x01-work-\xf2"
+ transaction._id = txn_id
+
+ # Make a **real** collection reference as parent.
+ parent = client.collection("declaration")
+
+ # Add a dummy response to the minimal fake GAPIC.
+ parent_path, expected_prefix = parent._parent_info()
+ name = "{}/burger".format(expected_prefix)
+ data = {"lettuce": b"\xee\x87"}
+ response_pb = _make_query_response(name=name, data=data)
+ firestore_api.run_query.return_value = AsyncIter([response_pb])
+
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+ get_response = query.stream(transaction=transaction)
+ self.assertIsInstance(get_response, types.AsyncGeneratorType)
+ returned = [x async for x in get_response]
+ self.assertEqual(len(returned), 1)
+ snapshot = returned[0]
+ self.assertEqual(snapshot.reference._path, ("declaration", "burger"))
+ self.assertEqual(snapshot.to_dict(), data)
+
+ # Verify the mock call.
+ firestore_api.run_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": txn_id,
+ },
+ metadata=client._rpc_metadata,
+ )
+
+ @pytest.mark.asyncio
+ async def test_stream_no_results(self):
+ # Create a minimal fake GAPIC with a dummy response.
+ firestore_api = AsyncMock(spec=["run_query"])
+ empty_response = _make_query_response()
+ run_query_response = AsyncIter([empty_response])
+ firestore_api.run_query.return_value = run_query_response
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Make a **real** collection reference as parent.
+ parent = client.collection("dah", "dah", "dum")
+ query = self._make_one(parent)
+
+ get_response = query.stream()
+ self.assertIsInstance(get_response, types.AsyncGeneratorType)
+ self.assertEqual([x async for x in get_response], [])
+
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ firestore_api.run_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ )
+
+ @pytest.mark.asyncio
+ async def test_stream_second_response_in_empty_stream(self):
+ # Create a minimal fake GAPIC with a dummy response.
+ firestore_api = AsyncMock(spec=["run_query"])
+ empty_response1 = _make_query_response()
+ empty_response2 = _make_query_response()
+ run_query_response = AsyncIter([empty_response1, empty_response2])
+ firestore_api.run_query.return_value = run_query_response
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Make a **real** collection reference as parent.
+ parent = client.collection("dah", "dah", "dum")
+ query = self._make_one(parent)
+
+ get_response = query.stream()
+ self.assertIsInstance(get_response, types.AsyncGeneratorType)
+ self.assertEqual([x async for x in get_response], [])
+
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ firestore_api.run_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ )
+
+ @pytest.mark.asyncio
+ async def test_stream_with_skipped_results(self):
+ # Create a minimal fake GAPIC.
+ firestore_api = AsyncMock(spec=["run_query"])
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Make a **real** collection reference as parent.
+ parent = client.collection("talk", "and", "chew-gum")
+
+ # Add two dummy responses to the minimal fake GAPIC.
+ _, expected_prefix = parent._parent_info()
+ response_pb1 = _make_query_response(skipped_results=1)
+ name = "{}/clock".format(expected_prefix)
+ data = {"noon": 12, "nested": {"bird": 10.5}}
+ response_pb2 = _make_query_response(name=name, data=data)
+ firestore_api.run_query.return_value = AsyncIter([response_pb1, response_pb2])
+
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+ get_response = query.stream()
+ self.assertIsInstance(get_response, types.AsyncGeneratorType)
+ returned = [x async for x in get_response]
+ self.assertEqual(len(returned), 1)
+ snapshot = returned[0]
+ self.assertEqual(snapshot.reference._path, ("talk", "and", "chew-gum", "clock"))
+ self.assertEqual(snapshot.to_dict(), data)
+
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ firestore_api.run_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ )
+
+ @pytest.mark.asyncio
+ async def test_stream_empty_after_first_response(self):
+ # Create a minimal fake GAPIC.
+ firestore_api = AsyncMock(spec=["run_query"])
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Make a **real** collection reference as parent.
+ parent = client.collection("charles")
+
+ # Add two dummy responses to the minimal fake GAPIC.
+ _, expected_prefix = parent._parent_info()
+ name = "{}/bark".format(expected_prefix)
+ data = {"lee": "hoop"}
+ response_pb1 = _make_query_response(name=name, data=data)
+ response_pb2 = _make_query_response()
+ firestore_api.run_query.return_value = AsyncIter([response_pb1, response_pb2])
+
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+ get_response = query.stream()
+ self.assertIsInstance(get_response, types.AsyncGeneratorType)
+ returned = [x async for x in get_response]
+ self.assertEqual(len(returned), 1)
+ snapshot = returned[0]
+ self.assertEqual(snapshot.reference._path, ("charles", "bark"))
+ self.assertEqual(snapshot.to_dict(), data)
+
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ firestore_api.run_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ )
+
+ @pytest.mark.asyncio
+ async def test_stream_w_collection_group(self):
+ # Create a minimal fake GAPIC.
+ firestore_api = AsyncMock(spec=["run_query"])
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Make a **real** collection reference as parent.
+ parent = client.collection("charles")
+ other = client.collection("dora")
+
+ # Add two dummy responses to the minimal fake GAPIC.
+ _, other_prefix = other._parent_info()
+ name = "{}/bark".format(other_prefix)
+ data = {"lee": "hoop"}
+ response_pb1 = _make_query_response(name=name, data=data)
+ response_pb2 = _make_query_response()
+ firestore_api.run_query.return_value = AsyncIter([response_pb1, response_pb2])
+
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+ query._all_descendants = True
+ get_response = query.stream()
+ self.assertIsInstance(get_response, types.AsyncGeneratorType)
+ returned = [x async for x in get_response]
+ self.assertEqual(len(returned), 1)
+ snapshot = returned[0]
+ to_match = other.document("bark")
+ self.assertEqual(snapshot.reference._document_path, to_match._document_path)
+ self.assertEqual(snapshot.to_dict(), data)
+
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ firestore_api.run_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ )
+
+
+class TestCollectionGroup(aiounittest.AsyncTestCase):
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.async_query import AsyncCollectionGroup
+
+ return AsyncCollectionGroup
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def test_constructor(self):
+ query = self._make_one(mock.sentinel.parent)
+ self.assertIs(query._parent, mock.sentinel.parent)
+ self.assertIsNone(query._projection)
+ self.assertEqual(query._field_filters, ())
+ self.assertEqual(query._orders, ())
+ self.assertIsNone(query._limit)
+ self.assertIsNone(query._offset)
+ self.assertIsNone(query._start_at)
+ self.assertIsNone(query._end_at)
+ self.assertTrue(query._all_descendants)
+
+    def test_constructor_all_descendants_is_false(self):
+ with pytest.raises(ValueError):
+ self._make_one(mock.sentinel.parent, all_descendants=False)
+
+ @pytest.mark.asyncio
+ async def _get_partitions_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
+
+ # Create a minimal fake GAPIC.
+ firestore_api = AsyncMock(spec=["partition_query"])
+
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
+
+ # Make a **real** collection reference as parent.
+ parent = client.collection("charles")
+
+ # Make two **real** document references to use as cursors
+ document1 = parent.document("one")
+ document2 = parent.document("two")
+
+ # Add cursor pb's to the minimal fake GAPIC.
+ cursor_pb1 = _make_cursor_pb(([document1], False))
+ cursor_pb2 = _make_cursor_pb(([document2], False))
+ firestore_api.partition_query.return_value = AsyncIter([cursor_pb1, cursor_pb2])
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+ get_response = query.get_partitions(2, **kwargs)
+
+ self.assertIsInstance(get_response, types.AsyncGeneratorType)
+ returned = [i async for i in get_response]
+ self.assertEqual(len(returned), 3)
+
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ partition_query = self._make_one(
+ parent, orders=(query._make_order("__name__", query.ASCENDING),),
+ )
+ firestore_api.partition_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": partition_query._to_protobuf(),
+ "partition_count": 2,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
+ )
+
+ @pytest.mark.asyncio
+ async def test_get_partitions(self):
+ await self._get_partitions_helper()
+
+ @pytest.mark.asyncio
+ async def test_get_partitions_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._get_partitions_helper(retry=retry, timeout=timeout)
+
+ async def test_get_partitions_w_filter(self):
+ # Make a **real** collection reference as parent.
+ client = _make_client()
+ parent = client.collection("charles")
+
+ # Make a query that fails to partition
+ query = self._make_one(parent).where("foo", "==", "bar")
+ with pytest.raises(ValueError):
+ [i async for i in query.get_partitions(2)]
+
+ async def test_get_partitions_w_projection(self):
+ # Make a **real** collection reference as parent.
+ client = _make_client()
+ parent = client.collection("charles")
+
+ # Make a query that fails to partition
+ query = self._make_one(parent).select("foo")
+ with pytest.raises(ValueError):
+ [i async for i in query.get_partitions(2)]
+
+ async def test_get_partitions_w_limit(self):
+ # Make a **real** collection reference as parent.
+ client = _make_client()
+ parent = client.collection("charles")
+
+ # Make a query that fails to partition
+ query = self._make_one(parent).limit(10)
+ with pytest.raises(ValueError):
+ [i async for i in query.get_partitions(2)]
+
+ async def test_get_partitions_w_offset(self):
+ # Make a **real** collection reference as parent.
+ client = _make_client()
+ parent = client.collection("charles")
+
+ # Make a query that fails to partition
+ query = self._make_one(parent).offset(10)
+ with pytest.raises(ValueError):
+ [i async for i in query.get_partitions(2)]
+
+
+def _make_client(project="project-project"):
+ from google.cloud.firestore_v1.async_client import AsyncClient
+
+ credentials = _make_credentials()
+ return AsyncClient(project=project, credentials=credentials)
diff --git a/tests/unit/v1beta1/test_transaction.py b/tests/unit/v1/test_async_transaction.py
similarity index 62%
rename from tests/unit/v1beta1/test_transaction.py
rename to tests/unit/v1/test_async_transaction.py
index 1797007495..2e0f572b07 100644
--- a/tests/unit/v1beta1/test_transaction.py
+++ b/tests/unit/v1/test_async_transaction.py
@@ -1,4 +1,4 @@
-# Copyright 2017 Google LLC All rights reserved.
+# Copyright 2020 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,25 +12,26 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import unittest
+import pytest
+import aiounittest
import mock
-import pytest
+from tests.unit.v1.test__helpers import AsyncMock
-class TestTransaction(unittest.TestCase):
+class TestAsyncTransaction(aiounittest.AsyncTestCase):
@staticmethod
def _get_target_class():
- from google.cloud.firestore_v1beta1.transaction import Transaction
+ from google.cloud.firestore_v1.async_transaction import AsyncTransaction
- return Transaction
+ return AsyncTransaction
def _make_one(self, *args, **kwargs):
klass = self._get_target_class()
return klass(*args, **kwargs)
def test_constructor_defaults(self):
- from google.cloud.firestore_v1beta1.transaction import MAX_ATTEMPTS
+ from google.cloud.firestore_v1.async_transaction import MAX_ATTEMPTS
transaction = self._make_one(mock.sentinel.client)
self.assertIs(transaction._client, mock.sentinel.client)
@@ -50,7 +51,7 @@ def test_constructor_explicit(self):
self.assertIsNone(transaction._id)
def test__add_write_pbs_failure(self):
- from google.cloud.firestore_v1beta1.transaction import _WRITE_READ_ONLY
+ from google.cloud.firestore_v1.base_transaction import _WRITE_READ_ONLY
batch = self._make_one(mock.sentinel.client, read_only=True)
self.assertEqual(batch._write_pbs, [])
@@ -66,66 +67,27 @@ def test__add_write_pbs(self):
batch._add_write_pbs([mock.sentinel.write])
self.assertEqual(batch._write_pbs, [mock.sentinel.write])
- def test__options_protobuf_read_only(self):
- from google.cloud.firestore_v1beta1.proto import common_pb2
-
- transaction = self._make_one(mock.sentinel.client, read_only=True)
- options_pb = transaction._options_protobuf(None)
- expected_pb = common_pb2.TransactionOptions(
- read_only=common_pb2.TransactionOptions.ReadOnly()
- )
- self.assertEqual(options_pb, expected_pb)
-
- def test__options_protobuf_read_only_retry(self):
- from google.cloud.firestore_v1beta1.transaction import _CANT_RETRY_READ_ONLY
-
- transaction = self._make_one(mock.sentinel.client, read_only=True)
- retry_id = b"illuminate"
-
- with self.assertRaises(ValueError) as exc_info:
- transaction._options_protobuf(retry_id)
-
- self.assertEqual(exc_info.exception.args, (_CANT_RETRY_READ_ONLY,))
-
- def test__options_protobuf_read_write(self):
- transaction = self._make_one(mock.sentinel.client)
- options_pb = transaction._options_protobuf(None)
- self.assertIsNone(options_pb)
-
- def test__options_protobuf_on_retry(self):
- from google.cloud.firestore_v1beta1.proto import common_pb2
-
+ def test__clean_up(self):
transaction = self._make_one(mock.sentinel.client)
- retry_id = b"hocus-pocus"
- options_pb = transaction._options_protobuf(retry_id)
- expected_pb = common_pb2.TransactionOptions(
- read_write=common_pb2.TransactionOptions.ReadWrite(
- retry_transaction=retry_id
- )
+ transaction._write_pbs.extend(
+ [mock.sentinel.write_pb1, mock.sentinel.write_pb2]
)
- self.assertEqual(options_pb, expected_pb)
+ transaction._id = b"not-this-time-my-friend"
- def test_in_progress_property(self):
- transaction = self._make_one(mock.sentinel.client)
- self.assertFalse(transaction.in_progress)
- transaction._id = b"not-none-bites"
- self.assertTrue(transaction.in_progress)
+ ret_val = transaction._clean_up()
+ self.assertIsNone(ret_val)
- def test_id_property(self):
- transaction = self._make_one(mock.sentinel.client)
- transaction._id = mock.sentinel.eye_dee
- self.assertIs(transaction.id, mock.sentinel.eye_dee)
+ self.assertEqual(transaction._write_pbs, [])
+ self.assertIsNone(transaction._id)
- def test__begin(self):
- from google.cloud.firestore_v1beta1.gapic import firestore_client
- from google.cloud.firestore_v1beta1.proto import firestore_pb2
+ @pytest.mark.asyncio
+ async def test__begin(self):
+ from google.cloud.firestore_v1.types import firestore
# Create a minimal fake GAPIC with a dummy result.
- firestore_api = mock.create_autospec(
- firestore_client.FirestoreClient, instance=True
- )
+ firestore_api = AsyncMock()
txn_id = b"to-begin"
- response = firestore_pb2.BeginTransactionResponse(transaction=txn_id)
+ response = firestore.BeginTransactionResponse(transaction=txn_id)
firestore_api.begin_transaction.return_value = response
# Attach the fake GAPIC to a real client.
@@ -136,49 +98,36 @@ def test__begin(self):
transaction = self._make_one(client)
self.assertIsNone(transaction._id)
- ret_val = transaction._begin()
+ ret_val = await transaction._begin()
self.assertIsNone(ret_val)
self.assertEqual(transaction._id, txn_id)
# Verify the called mock.
firestore_api.begin_transaction.assert_called_once_with(
- client._database_string, options_=None, metadata=client._rpc_metadata
+ request={"database": client._database_string, "options": None},
+ metadata=client._rpc_metadata,
)
- def test__begin_failure(self):
- from google.cloud.firestore_v1beta1.transaction import _CANT_BEGIN
+ @pytest.mark.asyncio
+ async def test__begin_failure(self):
+ from google.cloud.firestore_v1.base_transaction import _CANT_BEGIN
client = _make_client()
transaction = self._make_one(client)
transaction._id = b"not-none"
with self.assertRaises(ValueError) as exc_info:
- transaction._begin()
+ await transaction._begin()
err_msg = _CANT_BEGIN.format(transaction._id)
self.assertEqual(exc_info.exception.args, (err_msg,))
- def test__clean_up(self):
- transaction = self._make_one(mock.sentinel.client)
- transaction._write_pbs.extend(
- [mock.sentinel.write_pb1, mock.sentinel.write_pb2]
- )
- transaction._id = b"not-this-time-my-friend"
-
- ret_val = transaction._clean_up()
- self.assertIsNone(ret_val)
-
- self.assertEqual(transaction._write_pbs, [])
- self.assertIsNone(transaction._id)
-
- def test__rollback(self):
+ @pytest.mark.asyncio
+ async def test__rollback(self):
from google.protobuf import empty_pb2
- from google.cloud.firestore_v1beta1.gapic import firestore_client
# Create a minimal fake GAPIC with a dummy result.
- firestore_api = mock.create_autospec(
- firestore_client.FirestoreClient, instance=True
- )
+ firestore_api = AsyncMock()
firestore_api.rollback.return_value = empty_pb2.Empty()
# Attach the fake GAPIC to a real client.
@@ -189,35 +138,35 @@ def test__rollback(self):
transaction = self._make_one(client)
txn_id = b"to-be-r\x00lled"
transaction._id = txn_id
- ret_val = transaction._rollback()
+ ret_val = await transaction._rollback()
self.assertIsNone(ret_val)
self.assertIsNone(transaction._id)
# Verify the called mock.
firestore_api.rollback.assert_called_once_with(
- client._database_string, txn_id, metadata=client._rpc_metadata
+ request={"database": client._database_string, "transaction": txn_id},
+ metadata=client._rpc_metadata,
)
- def test__rollback_not_allowed(self):
- from google.cloud.firestore_v1beta1.transaction import _CANT_ROLLBACK
+ @pytest.mark.asyncio
+ async def test__rollback_not_allowed(self):
+ from google.cloud.firestore_v1.base_transaction import _CANT_ROLLBACK
client = _make_client()
transaction = self._make_one(client)
self.assertIsNone(transaction._id)
with self.assertRaises(ValueError) as exc_info:
- transaction._rollback()
+ await transaction._rollback()
self.assertEqual(exc_info.exception.args, (_CANT_ROLLBACK,))
- def test__rollback_failure(self):
+ @pytest.mark.asyncio
+ async def test__rollback_failure(self):
from google.api_core import exceptions
- from google.cloud.firestore_v1beta1.gapic import firestore_client
# Create a minimal fake GAPIC with a dummy failure.
- firestore_api = mock.create_autospec(
- firestore_client.FirestoreClient, instance=True
- )
+ firestore_api = AsyncMock()
exc = exceptions.InternalServerError("Fire during rollback.")
firestore_api.rollback.side_effect = exc
@@ -231,7 +180,7 @@ def test__rollback_failure(self):
transaction._id = txn_id
with self.assertRaises(exceptions.InternalServerError) as exc_info:
- transaction._rollback()
+ await transaction._rollback()
self.assertIs(exc_info.exception, exc)
self.assertIsNone(transaction._id)
@@ -239,21 +188,18 @@ def test__rollback_failure(self):
# Verify the called mock.
firestore_api.rollback.assert_called_once_with(
- client._database_string, txn_id, metadata=client._rpc_metadata
+ request={"database": client._database_string, "transaction": txn_id},
+ metadata=client._rpc_metadata,
)
- def test__commit(self):
- from google.cloud.firestore_v1beta1.gapic import firestore_client
- from google.cloud.firestore_v1beta1.proto import firestore_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
+ @pytest.mark.asyncio
+ async def test__commit(self):
+ from google.cloud.firestore_v1.types import firestore
+ from google.cloud.firestore_v1.types import write
# Create a minimal fake GAPIC with a dummy result.
- firestore_api = mock.create_autospec(
- firestore_client.FirestoreClient, instance=True
- )
- commit_response = firestore_pb2.CommitResponse(
- write_results=[write_pb2.WriteResult()]
- )
+ firestore_api = AsyncMock()
+ commit_response = firestore.CommitResponse(write_results=[write.WriteResult()])
firestore_api.commit.return_value = commit_response
# Attach the fake GAPIC to a real client.
@@ -268,7 +214,7 @@ def test__commit(self):
transaction.set(document, {"apple": 4.5})
write_pbs = transaction._write_pbs[::]
- write_results = transaction._commit()
+ write_results = await transaction._commit()
self.assertEqual(write_results, list(commit_response.write_results))
# Make sure transaction has no more "changes".
self.assertIsNone(transaction._id)
@@ -276,30 +222,31 @@ def test__commit(self):
# Verify the mocks.
firestore_api.commit.assert_called_once_with(
- client._database_string,
- write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
- def test__commit_not_allowed(self):
- from google.cloud.firestore_v1beta1.transaction import _CANT_COMMIT
+ @pytest.mark.asyncio
+ async def test__commit_not_allowed(self):
+ from google.cloud.firestore_v1.base_transaction import _CANT_COMMIT
transaction = self._make_one(mock.sentinel.client)
self.assertIsNone(transaction._id)
with self.assertRaises(ValueError) as exc_info:
- transaction._commit()
+ await transaction._commit()
self.assertEqual(exc_info.exception.args, (_CANT_COMMIT,))
- def test__commit_failure(self):
+ @pytest.mark.asyncio
+ async def test__commit_failure(self):
from google.api_core import exceptions
- from google.cloud.firestore_v1beta1.gapic import firestore_client
# Create a minimal fake GAPIC with a dummy failure.
- firestore_api = mock.create_autospec(
- firestore_client.FirestoreClient, instance=True
- )
+ firestore_api = AsyncMock()
exc = exceptions.InternalServerError("Fire during commit.")
firestore_api.commit.side_effect = exc
@@ -316,7 +263,7 @@ def test__commit_failure(self):
write_pbs = transaction._write_pbs[::]
with self.assertRaises(exceptions.InternalServerError) as exc_info:
- transaction._commit()
+ await transaction._commit()
self.assertIs(exc_info.exception, exc)
self.assertEqual(transaction._id, txn_id)
@@ -324,19 +271,107 @@ def test__commit_failure(self):
# Verify the called mock.
firestore_api.commit.assert_called_once_with(
- client._database_string,
- write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
+ async def _get_all_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
+
+ client = AsyncMock(spec=["get_all"])
+ transaction = self._make_one(client)
+ ref1, ref2 = mock.Mock(), mock.Mock()
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ result = await transaction.get_all([ref1, ref2], **kwargs)
-class Test_Transactional(unittest.TestCase):
+ client.get_all.assert_called_once_with(
+ [ref1, ref2], transaction=transaction, **kwargs,
+ )
+ self.assertIs(result, client.get_all.return_value)
+
+ @pytest.mark.asyncio
+ async def test_get_all(self):
+ await self._get_all_helper()
+
+ @pytest.mark.asyncio
+ async def test_get_all_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._get_all_helper(retry=retry, timeout=timeout)
+
+ async def _get_w_document_ref_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1.async_document import AsyncDocumentReference
+ from google.cloud.firestore_v1 import _helpers
+
+ client = AsyncMock(spec=["get_all"])
+ transaction = self._make_one(client)
+ ref = AsyncDocumentReference("documents", "doc-id")
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ result = await transaction.get(ref, **kwargs)
+
+ client.get_all.assert_called_once_with([ref], transaction=transaction, **kwargs)
+ self.assertIs(result, client.get_all.return_value)
+
+ @pytest.mark.asyncio
+ async def test_get_w_document_ref(self):
+ await self._get_w_document_ref_helper()
+
+ @pytest.mark.asyncio
+ async def test_get_w_document_ref_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ await self._get_w_document_ref_helper(retry=retry, timeout=timeout)
+
+ async def _get_w_query_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1.async_query import AsyncQuery
+ from google.cloud.firestore_v1 import _helpers
+
+ client = AsyncMock(spec=[])
+ transaction = self._make_one(client)
+ query = AsyncQuery(parent=AsyncMock(spec=[]))
+ query.stream = AsyncMock()
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ result = await transaction.get(query, **kwargs,)
+
+ query.stream.assert_called_once_with(
+ transaction=transaction, **kwargs,
+ )
+ self.assertIs(result, query.stream.return_value)
+
+ @pytest.mark.asyncio
+ async def test_get_w_query(self):
+ await self._get_w_query_helper()
+
+ @pytest.mark.asyncio
+ async def test_get_w_query_w_retry_timeout(self):
+        await self._get_w_query_helper(retry=mock.sentinel.retry, timeout=123.0)
+
+ @pytest.mark.asyncio
+ async def test_get_failure(self):
+ client = _make_client()
+ transaction = self._make_one(client)
+ ref_or_query = object()
+ with self.assertRaises(ValueError):
+ await transaction.get(ref_or_query)
+
+
+class Test_Transactional(aiounittest.AsyncTestCase):
@staticmethod
def _get_target_class():
- from google.cloud.firestore_v1beta1.transaction import _Transactional
+ from google.cloud.firestore_v1.async_transaction import _AsyncTransactional
- return _Transactional
+ return _AsyncTransactional
def _make_one(self, *args, **kwargs):
klass = self._get_target_class()
@@ -348,24 +383,14 @@ def test_constructor(self):
self.assertIsNone(wrapped.current_id)
self.assertIsNone(wrapped.retry_id)
- def test__reset(self):
- wrapped = self._make_one(mock.sentinel.callable_)
- wrapped.current_id = b"not-none"
- wrapped.retry_id = b"also-not"
-
- ret_val = wrapped._reset()
- self.assertIsNone(ret_val)
-
- self.assertIsNone(wrapped.current_id)
- self.assertIsNone(wrapped.retry_id)
-
- def test__pre_commit_success(self):
- to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[])
+ @pytest.mark.asyncio
+ async def test__pre_commit_success(self):
+ to_wrap = AsyncMock(return_value=mock.sentinel.result, spec=[])
wrapped = self._make_one(to_wrap)
txn_id = b"totes-began"
transaction = _make_transaction(txn_id)
- result = wrapped._pre_commit(transaction, "pos", key="word")
+ result = await wrapped._pre_commit(transaction, "pos", key="word")
self.assertIs(result, mock.sentinel.result)
self.assertEqual(transaction._id, txn_id)
@@ -376,24 +401,27 @@ def test__pre_commit_success(self):
to_wrap.assert_called_once_with(transaction, "pos", key="word")
firestore_api = transaction._client._firestore_api
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=None,
+ request={
+ "database": transaction._client._database_string,
+ "options": None,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_not_called()
- def test__pre_commit_retry_id_already_set_success(self):
- from google.cloud.firestore_v1beta1.proto import common_pb2
+ @pytest.mark.asyncio
+ async def test__pre_commit_retry_id_already_set_success(self):
+ from google.cloud.firestore_v1.types import common
- to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[])
+ to_wrap = AsyncMock(return_value=mock.sentinel.result, spec=[])
wrapped = self._make_one(to_wrap)
txn_id1 = b"already-set"
wrapped.retry_id = txn_id1
txn_id2 = b"ok-here-too"
transaction = _make_transaction(txn_id2)
- result = wrapped._pre_commit(transaction)
+ result = await wrapped._pre_commit(transaction)
self.assertIs(result, mock.sentinel.result)
self.assertEqual(transaction._id, txn_id2)
@@ -403,28 +431,29 @@ def test__pre_commit_retry_id_already_set_success(self):
# Verify mocks.
to_wrap.assert_called_once_with(transaction)
firestore_api = transaction._client._firestore_api
- options_ = common_pb2.TransactionOptions(
- read_write=common_pb2.TransactionOptions.ReadWrite(
- retry_transaction=txn_id1
- )
+ options_ = common.TransactionOptions(
+ read_write=common.TransactionOptions.ReadWrite(retry_transaction=txn_id1)
)
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=options_,
+ request={
+ "database": transaction._client._database_string,
+ "options": options_,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_not_called()
- def test__pre_commit_failure(self):
+ @pytest.mark.asyncio
+ async def test__pre_commit_failure(self):
exc = RuntimeError("Nope not today.")
- to_wrap = mock.Mock(side_effect=exc, spec=[])
+ to_wrap = AsyncMock(side_effect=exc, spec=[])
wrapped = self._make_one(to_wrap)
txn_id = b"gotta-fail"
transaction = _make_transaction(txn_id)
with self.assertRaises(RuntimeError) as exc_info:
- wrapped._pre_commit(transaction, 10, 20)
+ await wrapped._pre_commit(transaction, 10, 20)
self.assertIs(exc_info.exception, exc)
self.assertIsNone(transaction._id)
@@ -435,22 +464,27 @@ def test__pre_commit_failure(self):
to_wrap.assert_called_once_with(transaction, 10, 20)
firestore_api = transaction._client._firestore_api
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=None,
+ request={
+ "database": transaction._client._database_string,
+ "options": None,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_called_once_with(
- transaction._client._database_string,
- txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.commit.assert_not_called()
- def test__pre_commit_failure_with_rollback_failure(self):
+ @pytest.mark.asyncio
+ async def test__pre_commit_failure_with_rollback_failure(self):
from google.api_core import exceptions
exc1 = ValueError("I will not be only failure.")
- to_wrap = mock.Mock(side_effect=exc1, spec=[])
+ to_wrap = AsyncMock(side_effect=exc1, spec=[])
wrapped = self._make_one(to_wrap)
txn_id = b"both-will-fail"
@@ -462,7 +496,7 @@ def test__pre_commit_failure_with_rollback_failure(self):
# Try to ``_pre_commit``
with self.assertRaises(exceptions.InternalServerError) as exc_info:
- wrapped._pre_commit(transaction, a="b", c="zebra")
+ await wrapped._pre_commit(transaction, a="b", c="zebra")
self.assertIs(exc_info.exception, exc2)
self.assertIsNone(transaction._id)
@@ -472,24 +506,29 @@ def test__pre_commit_failure_with_rollback_failure(self):
# Verify mocks.
to_wrap.assert_called_once_with(transaction, a="b", c="zebra")
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=None,
+ request={
+ "database": transaction._client._database_string,
+ "options": None,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_called_once_with(
- transaction._client._database_string,
- txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.commit.assert_not_called()
- def test__maybe_commit_success(self):
+ @pytest.mark.asyncio
+ async def test__maybe_commit_success(self):
wrapped = self._make_one(mock.sentinel.callable_)
txn_id = b"nyet"
transaction = _make_transaction(txn_id)
transaction._id = txn_id # We won't call ``begin()``.
- succeeded = wrapped._maybe_commit(transaction)
+ succeeded = await wrapped._maybe_commit(transaction)
self.assertTrue(succeeded)
# On success, _id is reset.
@@ -500,13 +539,16 @@ def test__maybe_commit_success(self):
firestore_api.begin_transaction.assert_not_called()
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
- def test__maybe_commit_failure_read_only(self):
+ @pytest.mark.asyncio
+ async def test__maybe_commit_failure_read_only(self):
from google.api_core import exceptions
wrapped = self._make_one(mock.sentinel.callable_)
@@ -524,7 +566,7 @@ def test__maybe_commit_failure_read_only(self):
firestore_api.commit.side_effect = exc
with self.assertRaises(exceptions.Aborted) as exc_info:
- wrapped._maybe_commit(transaction)
+ await wrapped._maybe_commit(transaction)
self.assertIs(exc_info.exception, exc)
self.assertEqual(transaction._id, txn_id)
@@ -535,13 +577,16 @@ def test__maybe_commit_failure_read_only(self):
firestore_api.begin_transaction.assert_not_called()
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
- def test__maybe_commit_failure_can_retry(self):
+ @pytest.mark.asyncio
+ async def test__maybe_commit_failure_can_retry(self):
from google.api_core import exceptions
wrapped = self._make_one(mock.sentinel.callable_)
@@ -557,7 +602,7 @@ def test__maybe_commit_failure_can_retry(self):
firestore_api = transaction._client._firestore_api
firestore_api.commit.side_effect = exc
- succeeded = wrapped._maybe_commit(transaction)
+ succeeded = await wrapped._maybe_commit(transaction)
self.assertFalse(succeeded)
self.assertEqual(transaction._id, txn_id)
@@ -568,13 +613,16 @@ def test__maybe_commit_failure_can_retry(self):
firestore_api.begin_transaction.assert_not_called()
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
- def test__maybe_commit_failure_cannot_retry(self):
+ @pytest.mark.asyncio
+ async def test__maybe_commit_failure_cannot_retry(self):
from google.api_core import exceptions
wrapped = self._make_one(mock.sentinel.callable_)
@@ -591,7 +639,7 @@ def test__maybe_commit_failure_cannot_retry(self):
firestore_api.commit.side_effect = exc
with self.assertRaises(exceptions.InternalServerError) as exc_info:
- wrapped._maybe_commit(transaction)
+ await wrapped._maybe_commit(transaction)
self.assertIs(exc_info.exception, exc)
self.assertEqual(transaction._id, txn_id)
@@ -602,19 +650,22 @@ def test__maybe_commit_failure_cannot_retry(self):
firestore_api.begin_transaction.assert_not_called()
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
- def test___call__success_first_attempt(self):
- to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[])
+ @pytest.mark.asyncio
+ async def test___call__success_first_attempt(self):
+ to_wrap = AsyncMock(return_value=mock.sentinel.result, spec=[])
wrapped = self._make_one(to_wrap)
txn_id = b"whole-enchilada"
transaction = _make_transaction(txn_id)
- result = wrapped(transaction, "a", b="c")
+ result = await wrapped(transaction, "a", b="c")
self.assertIs(result, mock.sentinel.result)
self.assertIsNone(transaction._id)
@@ -625,25 +676,27 @@ def test___call__success_first_attempt(self):
to_wrap.assert_called_once_with(transaction, "a", b="c")
firestore_api = transaction._client._firestore_api
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=None,
+ request={"database": transaction._client._database_string, "options": None},
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
- def test___call__success_second_attempt(self):
+ @pytest.mark.asyncio
+ async def test___call__success_second_attempt(self):
from google.api_core import exceptions
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import firestore_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import firestore
+ from google.cloud.firestore_v1.types import write
- to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[])
+ to_wrap = AsyncMock(return_value=mock.sentinel.result, spec=[])
wrapped = self._make_one(to_wrap)
txn_id = b"whole-enchilada"
@@ -654,11 +707,11 @@ def test___call__success_second_attempt(self):
firestore_api = transaction._client._firestore_api
firestore_api.commit.side_effect = [
exc,
- firestore_pb2.CommitResponse(write_results=[write_pb2.WriteResult()]),
+ firestore.CommitResponse(write_results=[write.WriteResult()]),
]
# Call the __call__-able ``wrapped``.
- result = wrapped(transaction, "a", b="c")
+ result = await wrapped(transaction, "a", b="c")
self.assertIs(result, mock.sentinel.result)
self.assertIsNone(transaction._id)
@@ -670,33 +723,37 @@ def test___call__success_second_attempt(self):
self.assertEqual(to_wrap.mock_calls, [wrapped_call, wrapped_call])
firestore_api = transaction._client._firestore_api
db_str = transaction._client._database_string
- options_ = common_pb2.TransactionOptions(
- read_write=common_pb2.TransactionOptions.ReadWrite(retry_transaction=txn_id)
+ options_ = common.TransactionOptions(
+ read_write=common.TransactionOptions.ReadWrite(retry_transaction=txn_id)
)
self.assertEqual(
firestore_api.begin_transaction.mock_calls,
[
mock.call(
- db_str, options_=None, metadata=transaction._client._rpc_metadata
+ request={"database": db_str, "options": None},
+ metadata=transaction._client._rpc_metadata,
),
mock.call(
- db_str,
- options_=options_,
+ request={"database": db_str, "options": options_},
metadata=transaction._client._rpc_metadata,
),
],
)
firestore_api.rollback.assert_not_called()
commit_call = mock.call(
- db_str, [], transaction=txn_id, metadata=transaction._client._rpc_metadata
+ request={"database": db_str, "writes": [], "transaction": txn_id},
+ metadata=transaction._client._rpc_metadata,
)
self.assertEqual(firestore_api.commit.mock_calls, [commit_call, commit_call])
- def test___call__failure(self):
+ @pytest.mark.asyncio
+ async def test___call__failure(self):
from google.api_core import exceptions
- from google.cloud.firestore_v1beta1.transaction import _EXCEED_ATTEMPTS_TEMPLATE
+ from google.cloud.firestore_v1.async_transaction import (
+ _EXCEED_ATTEMPTS_TEMPLATE,
+ )
- to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[])
+ to_wrap = AsyncMock(return_value=mock.sentinel.result, spec=[])
wrapped = self._make_one(to_wrap)
txn_id = b"only-one-shot"
@@ -709,7 +766,7 @@ def test___call__failure(self):
# Call the __call__-able ``wrapped``.
with self.assertRaises(ValueError) as exc_info:
- wrapped(transaction, "here", there=1.5)
+ await wrapped(transaction, "here", there=1.5)
err_msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts)
self.assertEqual(exc_info.exception.args, (err_msg,))
@@ -721,53 +778,58 @@ def test___call__failure(self):
# Verify mocks.
to_wrap.assert_called_once_with(transaction, "here", there=1.5)
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=None,
+ request={
+ "database": transaction._client._database_string,
+ "options": None,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_called_once_with(
- transaction._client._database_string,
- txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
-class Test_transactional(unittest.TestCase):
+class Test_async_transactional(aiounittest.AsyncTestCase):
@staticmethod
def _call_fut(to_wrap):
- from google.cloud.firestore_v1beta1.transaction import transactional
+ from google.cloud.firestore_v1.async_transaction import async_transactional
- return transactional(to_wrap)
+ return async_transactional(to_wrap)
def test_it(self):
- from google.cloud.firestore_v1beta1.transaction import _Transactional
+ from google.cloud.firestore_v1.async_transaction import _AsyncTransactional
wrapped = self._call_fut(mock.sentinel.callable_)
- self.assertIsInstance(wrapped, _Transactional)
+ self.assertIsInstance(wrapped, _AsyncTransactional)
self.assertIs(wrapped.to_wrap, mock.sentinel.callable_)
-class Test__commit_with_retry(unittest.TestCase):
+class Test__commit_with_retry(aiounittest.AsyncTestCase):
@staticmethod
- def _call_fut(client, write_pbs, transaction_id):
- from google.cloud.firestore_v1beta1.transaction import _commit_with_retry
+ @pytest.mark.asyncio
+ async def _call_fut(client, write_pbs, transaction_id):
+ from google.cloud.firestore_v1.async_transaction import _commit_with_retry
- return _commit_with_retry(client, write_pbs, transaction_id)
+ return await _commit_with_retry(client, write_pbs, transaction_id)
- @mock.patch("google.cloud.firestore_v1beta1.transaction._sleep")
- def test_success_first_attempt(self, _sleep):
- from google.cloud.firestore_v1beta1.gapic import firestore_client
+ @mock.patch("google.cloud.firestore_v1.async_transaction._sleep")
+ @pytest.mark.asyncio
+ async def test_success_first_attempt(self, _sleep):
# Create a minimal fake GAPIC with a dummy result.
- firestore_api = mock.create_autospec(
- firestore_client.FirestoreClient, instance=True
- )
+ firestore_api = AsyncMock()
# Attach the fake GAPIC to a real client.
client = _make_client("summer")
@@ -775,29 +837,30 @@ def test_success_first_attempt(self, _sleep):
# Call function and check result.
txn_id = b"cheeeeeez"
- commit_response = self._call_fut(client, mock.sentinel.write_pbs, txn_id)
+ commit_response = await self._call_fut(client, mock.sentinel.write_pbs, txn_id)
self.assertIs(commit_response, firestore_api.commit.return_value)
# Verify mocks used.
_sleep.assert_not_called()
firestore_api.commit.assert_called_once_with(
- client._database_string,
- mock.sentinel.write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": mock.sentinel.write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
@mock.patch(
- "google.cloud.firestore_v1beta1.transaction._sleep", side_effect=[2.0, 4.0]
+ "google.cloud.firestore_v1.async_transaction._sleep", side_effect=[2.0, 4.0]
)
- def test_success_third_attempt(self, _sleep):
+ @pytest.mark.asyncio
+ async def test_success_third_attempt(self, _sleep):
from google.api_core import exceptions
- from google.cloud.firestore_v1beta1.gapic import firestore_client
# Create a minimal fake GAPIC with a dummy result.
- firestore_api = mock.create_autospec(
- firestore_client.FirestoreClient, instance=True
- )
+ firestore_api = AsyncMock()
+
# Make sure the first two requests fail and the third succeeds.
firestore_api.commit.side_effect = [
exceptions.ServiceUnavailable("Server sleepy."),
@@ -811,33 +874,35 @@ def test_success_third_attempt(self, _sleep):
# Call function and check result.
txn_id = b"the-world\x00"
- commit_response = self._call_fut(client, mock.sentinel.write_pbs, txn_id)
+ commit_response = await self._call_fut(client, mock.sentinel.write_pbs, txn_id)
self.assertIs(commit_response, mock.sentinel.commit_response)
# Verify mocks used.
+        # _sleep must run once per retryable commit failure: first with the
+        # initial 1.0s interval, then with the 2.0s value the first call returned.
self.assertEqual(_sleep.call_count, 2)
_sleep.assert_any_call(1.0)
_sleep.assert_any_call(2.0)
# commit() called same way 3 times.
commit_call = mock.call(
- client._database_string,
- mock.sentinel.write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": mock.sentinel.write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
self.assertEqual(
firestore_api.commit.mock_calls, [commit_call, commit_call, commit_call]
)
- @mock.patch("google.cloud.firestore_v1beta1.transaction._sleep")
- def test_failure_first_attempt(self, _sleep):
+ @mock.patch("google.cloud.firestore_v1.async_transaction._sleep")
+ @pytest.mark.asyncio
+ async def test_failure_first_attempt(self, _sleep):
from google.api_core import exceptions
- from google.cloud.firestore_v1beta1.gapic import firestore_client
# Create a minimal fake GAPIC with a dummy result.
- firestore_api = mock.create_autospec(
- firestore_client.FirestoreClient, instance=True
- )
+ firestore_api = AsyncMock()
+
# Make sure the first request fails with an un-retryable error.
exc = exceptions.ResourceExhausted("We ran out of fries.")
firestore_api.commit.side_effect = exc
@@ -849,28 +914,29 @@ def test_failure_first_attempt(self, _sleep):
# Call function and check result.
txn_id = b"\x08\x06\x07\x05\x03\x00\x09-jenny"
with self.assertRaises(exceptions.ResourceExhausted) as exc_info:
- self._call_fut(client, mock.sentinel.write_pbs, txn_id)
+ await self._call_fut(client, mock.sentinel.write_pbs, txn_id)
self.assertIs(exc_info.exception, exc)
# Verify mocks used.
_sleep.assert_not_called()
firestore_api.commit.assert_called_once_with(
- client._database_string,
- mock.sentinel.write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": mock.sentinel.write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
- @mock.patch("google.cloud.firestore_v1beta1.transaction._sleep", return_value=2.0)
- def test_failure_second_attempt(self, _sleep):
+ @mock.patch("google.cloud.firestore_v1.async_transaction._sleep", return_value=2.0)
+ @pytest.mark.asyncio
+ async def test_failure_second_attempt(self, _sleep):
from google.api_core import exceptions
- from google.cloud.firestore_v1beta1.gapic import firestore_client
# Create a minimal fake GAPIC with a dummy result.
- firestore_api = mock.create_autospec(
- firestore_client.FirestoreClient, instance=True
- )
+ firestore_api = AsyncMock()
+
# Make sure the first request fails retry-able and second
# fails non-retryable.
exc1 = exceptions.ServiceUnavailable("Come back next time.")
@@ -884,7 +950,7 @@ def test_failure_second_attempt(self, _sleep):
# Call function and check result.
txn_id = b"the-journey-when-and-where-well-go"
with self.assertRaises(exceptions.InternalServerError) as exc_info:
- self._call_fut(client, mock.sentinel.write_pbs, txn_id)
+ await self._call_fut(client, mock.sentinel.write_pbs, txn_id)
self.assertIs(exc_info.exception, exc2)
@@ -892,54 +958,64 @@ def test_failure_second_attempt(self, _sleep):
_sleep.assert_called_once_with(1.0)
# commit() called same way 2 times.
commit_call = mock.call(
- client._database_string,
- mock.sentinel.write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": mock.sentinel.write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
self.assertEqual(firestore_api.commit.mock_calls, [commit_call, commit_call])
-class Test__sleep(unittest.TestCase):
+class Test__sleep(aiounittest.AsyncTestCase):
@staticmethod
- def _call_fut(current_sleep, **kwargs):
- from google.cloud.firestore_v1beta1.transaction import _sleep
+ @pytest.mark.asyncio
+ async def _call_fut(current_sleep, **kwargs):
+ from google.cloud.firestore_v1.async_transaction import _sleep
- return _sleep(current_sleep, **kwargs)
+ return await _sleep(current_sleep, **kwargs)
@mock.patch("random.uniform", return_value=5.5)
- @mock.patch("time.sleep", return_value=None)
- def test_defaults(self, sleep, uniform):
+ @mock.patch("asyncio.sleep", return_value=None)
+ @pytest.mark.asyncio
+ async def test_defaults(self, sleep, uniform):
curr_sleep = 10.0
self.assertLessEqual(uniform.return_value, curr_sleep)
- new_sleep = self._call_fut(curr_sleep)
+ new_sleep = await self._call_fut(curr_sleep)
self.assertEqual(new_sleep, 2.0 * curr_sleep)
uniform.assert_called_once_with(0.0, curr_sleep)
sleep.assert_called_once_with(uniform.return_value)
@mock.patch("random.uniform", return_value=10.5)
- @mock.patch("time.sleep", return_value=None)
- def test_explicit(self, sleep, uniform):
+ @mock.patch("asyncio.sleep", return_value=None)
+ @pytest.mark.asyncio
+ async def test_explicit(self, sleep, uniform):
curr_sleep = 12.25
self.assertLessEqual(uniform.return_value, curr_sleep)
multiplier = 1.5
- new_sleep = self._call_fut(curr_sleep, max_sleep=100.0, multiplier=multiplier)
+ new_sleep = await self._call_fut(
+ curr_sleep, max_sleep=100.0, multiplier=multiplier
+ )
self.assertEqual(new_sleep, multiplier * curr_sleep)
uniform.assert_called_once_with(0.0, curr_sleep)
sleep.assert_called_once_with(uniform.return_value)
@mock.patch("random.uniform", return_value=6.75)
- @mock.patch("time.sleep", return_value=None)
- def test_exceeds_max(self, sleep, uniform):
+ @mock.patch("asyncio.sleep", return_value=None)
+ @pytest.mark.asyncio
+ async def test_exceeds_max(self, sleep, uniform):
curr_sleep = 20.0
self.assertLessEqual(uniform.return_value, curr_sleep)
max_sleep = 38.5
- new_sleep = self._call_fut(curr_sleep, max_sleep=max_sleep, multiplier=2.0)
+ new_sleep = await self._call_fut(
+ curr_sleep, max_sleep=max_sleep, multiplier=2.0
+ )
self.assertEqual(new_sleep, max_sleep)
uniform.assert_called_once_with(0.0, curr_sleep)
@@ -953,38 +1029,31 @@ def _make_credentials():
def _make_client(project="feral-tom-cat"):
- from google.cloud.firestore_v1beta1.client import Client
+ from google.cloud.firestore_v1.client import Client
credentials = _make_credentials()
-
- with pytest.deprecated_call():
- return Client(project=project, credentials=credentials)
+ return Client(project=project, credentials=credentials)
def _make_transaction(txn_id, **txn_kwargs):
from google.protobuf import empty_pb2
- from google.cloud.firestore_v1beta1.gapic import firestore_client
- from google.cloud.firestore_v1beta1.proto import firestore_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1.transaction import Transaction
+ from google.cloud.firestore_v1.types import firestore
+ from google.cloud.firestore_v1.types import write
+ from google.cloud.firestore_v1.async_transaction import AsyncTransaction
# Create a fake GAPIC ...
- firestore_api = mock.create_autospec(
- firestore_client.FirestoreClient, instance=True
- )
+ firestore_api = AsyncMock()
# ... with a dummy ``BeginTransactionResponse`` result ...
- begin_response = firestore_pb2.BeginTransactionResponse(transaction=txn_id)
+ begin_response = firestore.BeginTransactionResponse(transaction=txn_id)
firestore_api.begin_transaction.return_value = begin_response
# ... and a dummy ``Rollback`` result ...
firestore_api.rollback.return_value = empty_pb2.Empty()
# ... and a dummy ``Commit`` result.
- commit_response = firestore_pb2.CommitResponse(
- write_results=[write_pb2.WriteResult()]
- )
+ commit_response = firestore.CommitResponse(write_results=[write.WriteResult()])
firestore_api.commit.return_value = commit_response
# Attach the fake GAPIC to a real client.
client = _make_client()
client._firestore_api_internal = firestore_api
- return Transaction(client, **txn_kwargs)
+ return AsyncTransaction(client, **txn_kwargs)
diff --git a/tests/unit/v1/test_base_batch.py b/tests/unit/v1/test_base_batch.py
new file mode 100644
index 0000000000..affe0e1395
--- /dev/null
+++ b/tests/unit/v1/test_base_batch.py
@@ -0,0 +1,172 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+import mock
+
+
+class TestBaseWriteBatch(unittest.TestCase):
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.base_batch import BaseWriteBatch
+
+ return BaseWriteBatch
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def test_constructor(self):
+ batch = self._make_one(mock.sentinel.client)
+ self.assertIs(batch._client, mock.sentinel.client)
+ self.assertEqual(batch._write_pbs, [])
+ self.assertIsNone(batch.write_results)
+ self.assertIsNone(batch.commit_time)
+
+ def test__add_write_pbs(self):
+ batch = self._make_one(mock.sentinel.client)
+ self.assertEqual(batch._write_pbs, [])
+ batch._add_write_pbs([mock.sentinel.write1, mock.sentinel.write2])
+ self.assertEqual(batch._write_pbs, [mock.sentinel.write1, mock.sentinel.write2])
+
+ def test_create(self):
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
+
+ client = _make_client()
+ batch = self._make_one(client)
+ self.assertEqual(batch._write_pbs, [])
+
+ reference = client.document("this", "one")
+ document_data = {"a": 10, "b": 2.5}
+ ret_val = batch.create(reference, document_data)
+ self.assertIsNone(ret_val)
+ new_write_pb = write.Write(
+ update=document.Document(
+ name=reference._document_path,
+ fields={
+ "a": _value_pb(integer_value=document_data["a"]),
+ "b": _value_pb(double_value=document_data["b"]),
+ },
+ ),
+ current_document=common.Precondition(exists=False),
+ )
+ self.assertEqual(batch._write_pbs, [new_write_pb])
+
+ def test_set(self):
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
+
+ client = _make_client()
+ batch = self._make_one(client)
+ self.assertEqual(batch._write_pbs, [])
+
+ reference = client.document("another", "one")
+ field = "zapzap"
+ value = u"meadows and flowers"
+ document_data = {field: value}
+ ret_val = batch.set(reference, document_data)
+ self.assertIsNone(ret_val)
+ new_write_pb = write.Write(
+ update=document.Document(
+ name=reference._document_path,
+ fields={field: _value_pb(string_value=value)},
+ )
+ )
+ self.assertEqual(batch._write_pbs, [new_write_pb])
+
+ def test_set_merge(self):
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
+
+ client = _make_client()
+ batch = self._make_one(client)
+ self.assertEqual(batch._write_pbs, [])
+
+ reference = client.document("another", "one")
+ field = "zapzap"
+ value = u"meadows and flowers"
+ document_data = {field: value}
+ ret_val = batch.set(reference, document_data, merge=True)
+ self.assertIsNone(ret_val)
+ new_write_pb = write.Write(
+ update=document.Document(
+ name=reference._document_path,
+ fields={field: _value_pb(string_value=value)},
+ ),
+ update_mask={"field_paths": [field]},
+ )
+ self.assertEqual(batch._write_pbs, [new_write_pb])
+
+ def test_update(self):
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
+
+ client = _make_client()
+ batch = self._make_one(client)
+ self.assertEqual(batch._write_pbs, [])
+
+ reference = client.document("cats", "cradle")
+ field_path = "head.foot"
+ value = u"knees toes shoulders"
+ field_updates = {field_path: value}
+
+ ret_val = batch.update(reference, field_updates)
+ self.assertIsNone(ret_val)
+
+ map_pb = document.MapValue(fields={"foot": _value_pb(string_value=value)})
+ new_write_pb = write.Write(
+ update=document.Document(
+ name=reference._document_path,
+ fields={"head": _value_pb(map_value=map_pb)},
+ ),
+ update_mask=common.DocumentMask(field_paths=[field_path]),
+ current_document=common.Precondition(exists=True),
+ )
+ self.assertEqual(batch._write_pbs, [new_write_pb])
+
+ def test_delete(self):
+ from google.cloud.firestore_v1.types import write
+
+ client = _make_client()
+ batch = self._make_one(client)
+ self.assertEqual(batch._write_pbs, [])
+
+ reference = client.document("early", "mornin", "dawn", "now")
+ ret_val = batch.delete(reference)
+ self.assertIsNone(ret_val)
+ new_write_pb = write.Write(delete=reference._document_path)
+ self.assertEqual(batch._write_pbs, [new_write_pb])
+
+
+def _value_pb(**kwargs):
+ from google.cloud.firestore_v1.types.document import Value
+
+ return Value(**kwargs)
+
+
+def _make_credentials():
+ import google.auth.credentials
+
+ return mock.Mock(spec=google.auth.credentials.Credentials)
+
+
+def _make_client(project="seventy-nine"):
+ from google.cloud.firestore_v1.client import Client
+
+ credentials = _make_credentials()
+ return Client(project=project, credentials=credentials)
diff --git a/tests/unit/v1/test_base_client.py b/tests/unit/v1/test_base_client.py
new file mode 100644
index 0000000000..631733e075
--- /dev/null
+++ b/tests/unit/v1/test_base_client.py
@@ -0,0 +1,363 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import unittest
+
+import mock
+
+
+class TestBaseClient(unittest.TestCase):
+
+ PROJECT = "my-prahjekt"
+
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.client import Client
+
+ return Client
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def _make_default_one(self):
+ credentials = _make_credentials()
+ return self._make_one(project=self.PROJECT, credentials=credentials)
+
+ @mock.patch(
+ "google.cloud.firestore_v1.services.firestore.client.FirestoreClient",
+ autospec=True,
+ return_value=mock.sentinel.firestore_api,
+ )
+ @mock.patch(
+ "google.cloud.firestore_v1.services.firestore.transports.grpc.FirestoreGrpcTransport",
+ autospec=True,
+ )
+ def test__firestore_api_property(self, mock_channel, mock_client):
+ mock_client.DEFAULT_ENDPOINT = "endpoint"
+ client = self._make_default_one()
+ client_options = client._client_options = mock.Mock()
+ self.assertIsNone(client._firestore_api_internal)
+ firestore_api = client._firestore_api
+ self.assertIs(firestore_api, mock_client.return_value)
+ self.assertIs(firestore_api, client._firestore_api_internal)
+ mock_client.assert_called_once_with(
+ transport=client._transport, client_options=client_options
+ )
+
+ # Call again to show that it is cached, but call count is still 1.
+ self.assertIs(client._firestore_api, mock_client.return_value)
+ self.assertEqual(mock_client.call_count, 1)
+
+ @mock.patch(
+ "google.cloud.firestore_v1.services.firestore.client.FirestoreClient",
+ autospec=True,
+ return_value=mock.sentinel.firestore_api,
+ )
+ @mock.patch(
+ "google.cloud.firestore_v1.services.firestore.transports.grpc.FirestoreGrpcTransport.create_channel",
+ autospec=True,
+ )
+ def test__firestore_api_property_with_emulator(
+ self, mock_insecure_channel, mock_client
+ ):
+ emulator_host = "localhost:8081"
+ with mock.patch("os.getenv") as getenv:
+ getenv.return_value = emulator_host
+ client = self._make_default_one()
+
+ self.assertIsNone(client._firestore_api_internal)
+ firestore_api = client._firestore_api
+ self.assertIs(firestore_api, mock_client.return_value)
+ self.assertIs(firestore_api, client._firestore_api_internal)
+
+ mock_insecure_channel.assert_called_once_with(host=emulator_host)
+
+ # Call again to show that it is cached, but call count is still 1.
+ self.assertIs(client._firestore_api, mock_client.return_value)
+ self.assertEqual(mock_client.call_count, 1)
+
+ def test___database_string_property(self):
+ credentials = _make_credentials()
+ database = "cheeeeez"
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, database=database
+ )
+ self.assertIsNone(client._database_string_internal)
+ database_string = client._database_string
+ expected = "projects/{}/databases/{}".format(client.project, client._database)
+ self.assertEqual(database_string, expected)
+ self.assertIs(database_string, client._database_string_internal)
+
+ # Swap it out with a unique value to verify it is cached.
+ client._database_string_internal = mock.sentinel.cached
+ self.assertIs(client._database_string, mock.sentinel.cached)
+
+ def test___rpc_metadata_property(self):
+ credentials = _make_credentials()
+ database = "quanta"
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, database=database
+ )
+
+ self.assertEqual(
+ client._rpc_metadata,
+ [("google-cloud-resource-prefix", client._database_string)],
+ )
+
+ def test__rpc_metadata_property_with_emulator(self):
+ emulator_host = "localhost:8081"
+ with mock.patch("os.getenv") as getenv:
+ getenv.return_value = emulator_host
+
+ credentials = _make_credentials()
+ database = "quanta"
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, database=database
+ )
+
+ self.assertEqual(
+ client._rpc_metadata,
+ [
+ ("google-cloud-resource-prefix", client._database_string),
+ ("authorization", "Bearer owner"),
+ ],
+ )
+
+ def test_field_path(self):
+ klass = self._get_target_class()
+ self.assertEqual(klass.field_path("a", "b", "c"), "a.b.c")
+
+ def test_write_option_last_update(self):
+ from google.protobuf import timestamp_pb2
+ from google.cloud.firestore_v1._helpers import LastUpdateOption
+
+ timestamp = timestamp_pb2.Timestamp(seconds=1299767599, nanos=811111097)
+
+ klass = self._get_target_class()
+ option = klass.write_option(last_update_time=timestamp)
+ self.assertIsInstance(option, LastUpdateOption)
+ self.assertEqual(option._last_update_time, timestamp)
+
+ def test_write_option_exists(self):
+ from google.cloud.firestore_v1._helpers import ExistsOption
+
+ klass = self._get_target_class()
+
+ option1 = klass.write_option(exists=False)
+ self.assertIsInstance(option1, ExistsOption)
+ self.assertFalse(option1._exists)
+
+ option2 = klass.write_option(exists=True)
+ self.assertIsInstance(option2, ExistsOption)
+ self.assertTrue(option2._exists)
+
+ def test_write_open_neither_arg(self):
+ from google.cloud.firestore_v1.base_client import _BAD_OPTION_ERR
+
+ klass = self._get_target_class()
+ with self.assertRaises(TypeError) as exc_info:
+ klass.write_option()
+
+ self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,))
+
+ def test_write_multiple_args(self):
+ from google.cloud.firestore_v1.base_client import _BAD_OPTION_ERR
+
+ klass = self._get_target_class()
+ with self.assertRaises(TypeError) as exc_info:
+ klass.write_option(exists=False, last_update_time=mock.sentinel.timestamp)
+
+ self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,))
+
+ def test_write_bad_arg(self):
+ from google.cloud.firestore_v1.base_client import _BAD_OPTION_ERR
+
+ klass = self._get_target_class()
+ with self.assertRaises(TypeError) as exc_info:
+ klass.write_option(spinach="popeye")
+
+ extra = "{!r} was provided".format("spinach")
+ self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR, extra))
+
+
+class Test__reference_info(unittest.TestCase):
+ @staticmethod
+ def _call_fut(references):
+ from google.cloud.firestore_v1.base_client import _reference_info
+
+ return _reference_info(references)
+
+ def test_it(self):
+ from google.cloud.firestore_v1.client import Client
+
+ credentials = _make_credentials()
+ client = Client(project="hi-projject", credentials=credentials)
+
+ reference1 = client.document("a", "b")
+ reference2 = client.document("a", "b", "c", "d")
+ reference3 = client.document("a", "b")
+ reference4 = client.document("f", "g")
+
+ doc_path1 = reference1._document_path
+ doc_path2 = reference2._document_path
+ doc_path3 = reference3._document_path
+ doc_path4 = reference4._document_path
+ self.assertEqual(doc_path1, doc_path3)
+
+ document_paths, reference_map = self._call_fut(
+ [reference1, reference2, reference3, reference4]
+ )
+ self.assertEqual(document_paths, [doc_path1, doc_path2, doc_path3, doc_path4])
+        # reference3 overrides reference1.
+ expected_map = {
+ doc_path2: reference2,
+ doc_path3: reference3,
+ doc_path4: reference4,
+ }
+ self.assertEqual(reference_map, expected_map)
+
+
+class Test__get_reference(unittest.TestCase):
+ @staticmethod
+ def _call_fut(document_path, reference_map):
+ from google.cloud.firestore_v1.base_client import _get_reference
+
+ return _get_reference(document_path, reference_map)
+
+ def test_success(self):
+ doc_path = "a/b/c"
+ reference_map = {doc_path: mock.sentinel.reference}
+ self.assertIs(self._call_fut(doc_path, reference_map), mock.sentinel.reference)
+
+ def test_failure(self):
+ from google.cloud.firestore_v1.base_client import _BAD_DOC_TEMPLATE
+
+ doc_path = "1/888/call-now"
+ with self.assertRaises(ValueError) as exc_info:
+ self._call_fut(doc_path, {})
+
+ err_msg = _BAD_DOC_TEMPLATE.format(doc_path)
+ self.assertEqual(exc_info.exception.args, (err_msg,))
+
+
+class Test__parse_batch_get(unittest.TestCase):
+ @staticmethod
+ def _call_fut(get_doc_response, reference_map, client=mock.sentinel.client):
+ from google.cloud.firestore_v1.base_client import _parse_batch_get
+
+ return _parse_batch_get(get_doc_response, reference_map, client)
+
+ @staticmethod
+ def _dummy_ref_string():
+ from google.cloud.firestore_v1.base_client import DEFAULT_DATABASE
+
+ project = u"bazzzz"
+ collection_id = u"fizz"
+ document_id = u"buzz"
+ return u"projects/{}/databases/{}/documents/{}/{}".format(
+ project, DEFAULT_DATABASE, collection_id, document_id
+ )
+
+ def test_found(self):
+ from google.cloud.firestore_v1.types import document
+ from google.cloud._helpers import _datetime_to_pb_timestamp
+ from google.cloud.firestore_v1.document import DocumentSnapshot
+
+ now = datetime.datetime.utcnow()
+ read_time = _datetime_to_pb_timestamp(now)
+ delta = datetime.timedelta(seconds=100)
+ update_time = _datetime_to_pb_timestamp(now - delta)
+ create_time = _datetime_to_pb_timestamp(now - 2 * delta)
+
+ ref_string = self._dummy_ref_string()
+ document_pb = document.Document(
+ name=ref_string,
+ fields={
+ "foo": document.Value(double_value=1.5),
+ "bar": document.Value(string_value=u"skillz"),
+ },
+ create_time=create_time,
+ update_time=update_time,
+ )
+ response_pb = _make_batch_response(found=document_pb, read_time=read_time)
+
+ reference_map = {ref_string: mock.sentinel.reference}
+ snapshot = self._call_fut(response_pb, reference_map)
+ self.assertIsInstance(snapshot, DocumentSnapshot)
+ self.assertIs(snapshot._reference, mock.sentinel.reference)
+ self.assertEqual(snapshot._data, {"foo": 1.5, "bar": u"skillz"})
+ self.assertTrue(snapshot._exists)
+ self.assertEqual(snapshot.read_time.timestamp_pb(), read_time)
+ self.assertEqual(snapshot.create_time.timestamp_pb(), create_time)
+ self.assertEqual(snapshot.update_time.timestamp_pb(), update_time)
+
+ def test_missing(self):
+ from google.cloud.firestore_v1.document import DocumentReference
+
+ ref_string = self._dummy_ref_string()
+ response_pb = _make_batch_response(missing=ref_string)
+ document = DocumentReference("fizz", "bazz", client=mock.sentinel.client)
+ reference_map = {ref_string: document}
+ snapshot = self._call_fut(response_pb, reference_map)
+ self.assertFalse(snapshot.exists)
+ self.assertEqual(snapshot.id, "bazz")
+ self.assertIsNone(snapshot._data)
+
+ def test_unset_result_type(self):
+ response_pb = _make_batch_response()
+ with self.assertRaises(ValueError):
+ self._call_fut(response_pb, {})
+
+ def test_unknown_result_type(self):
+ response_pb = mock.Mock()
+ response_pb._pb.mock_add_spec(spec=["WhichOneof"])
+ response_pb._pb.WhichOneof.return_value = "zoob_value"
+
+ with self.assertRaises(ValueError):
+ self._call_fut(response_pb, {})
+
+ response_pb._pb.WhichOneof.assert_called_once_with("result")
+
+
+class Test__get_doc_mask(unittest.TestCase):
+ @staticmethod
+ def _call_fut(field_paths):
+ from google.cloud.firestore_v1.base_client import _get_doc_mask
+
+ return _get_doc_mask(field_paths)
+
+ def test_none(self):
+ self.assertIsNone(self._call_fut(None))
+
+ def test_paths(self):
+ from google.cloud.firestore_v1.types import common
+
+ field_paths = ["a.b", "c"]
+ result = self._call_fut(field_paths)
+ expected = common.DocumentMask(field_paths=field_paths)
+ self.assertEqual(result, expected)
+
+
+def _make_credentials():
+ import google.auth.credentials
+
+ return mock.Mock(spec=google.auth.credentials.Credentials)
+
+
+def _make_batch_response(**kwargs):
+ from google.cloud.firestore_v1.types import firestore
+
+ return firestore.BatchGetDocumentsResponse(**kwargs)
diff --git a/tests/unit/v1/test_base_collection.py b/tests/unit/v1/test_base_collection.py
new file mode 100644
index 0000000000..01c68483a6
--- /dev/null
+++ b/tests/unit/v1/test_base_collection.py
@@ -0,0 +1,352 @@
+# Copyright 2017 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+import mock
+
+
+class TestCollectionReference(unittest.TestCase):
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ return BaseCollectionReference
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def test_constructor(self):
+ collection_id1 = "rooms"
+ document_id = "roomA"
+ collection_id2 = "messages"
+ client = mock.sentinel.client
+
+ collection = self._make_one(
+ collection_id1, document_id, collection_id2, client=client
+ )
+ self.assertIs(collection._client, client)
+ expected_path = (collection_id1, document_id, collection_id2)
+ self.assertEqual(collection._path, expected_path)
+
+ def test_constructor_invalid_path_empty(self):
+ with self.assertRaises(ValueError):
+ self._make_one()
+
+ def test_constructor_invalid_path_bad_collection_id(self):
+ with self.assertRaises(ValueError):
+ self._make_one(99, "doc", "bad-collection-id")
+
+ def test_constructor_invalid_path_bad_document_id(self):
+ with self.assertRaises(ValueError):
+ self._make_one("bad-document-ID", None, "sub-collection")
+
+ def test_constructor_invalid_path_bad_number_args(self):
+ with self.assertRaises(ValueError):
+ self._make_one("Just", "A-Document")
+
+ def test_constructor_invalid_kwarg(self):
+ with self.assertRaises(TypeError):
+ self._make_one("Coh-lek-shun", donut=True)
+
+ def test___eq___other_type(self):
+ client = mock.sentinel.client
+ collection = self._make_one("name", client=client)
+ other = object()
+ self.assertFalse(collection == other)
+
+ def test___eq___different_path_same_client(self):
+ client = mock.sentinel.client
+ collection = self._make_one("name", client=client)
+ other = self._make_one("other", client=client)
+ self.assertFalse(collection == other)
+
+ def test___eq___same_path_different_client(self):
+ client = mock.sentinel.client
+ other_client = mock.sentinel.other_client
+ collection = self._make_one("name", client=client)
+ other = self._make_one("name", client=other_client)
+ self.assertFalse(collection == other)
+
+ def test___eq___same_path_same_client(self):
+ client = mock.sentinel.client
+ collection = self._make_one("name", client=client)
+ other = self._make_one("name", client=client)
+ self.assertTrue(collection == other)
+
+ def test_id_property(self):
+ collection_id = "hi-bob"
+ collection = self._make_one(collection_id)
+ self.assertEqual(collection.id, collection_id)
+
+ def test_parent_property(self):
+ from google.cloud.firestore_v1.document import DocumentReference
+
+ collection_id1 = "grocery-store"
+ document_id = "market"
+ collection_id2 = "darth"
+ client = _make_client()
+ collection = self._make_one(
+ collection_id1, document_id, collection_id2, client=client
+ )
+
+ parent = collection.parent
+ self.assertIsInstance(parent, DocumentReference)
+ self.assertIs(parent._client, client)
+ self.assertEqual(parent._path, (collection_id1, document_id))
+
+ def test_parent_property_top_level(self):
+ collection = self._make_one("tahp-leh-vull")
+ self.assertIsNone(collection.parent)
+
+ def test_document_factory_explicit_id(self):
+ from google.cloud.firestore_v1.document import DocumentReference
+
+ collection_id = "grocery-store"
+ document_id = "market"
+ client = _make_client()
+ collection = self._make_one(collection_id, client=client)
+
+ child = collection.document(document_id)
+ self.assertIsInstance(child, DocumentReference)
+ self.assertIs(child._client, client)
+ self.assertEqual(child._path, (collection_id, document_id))
+
+ @mock.patch(
+ "google.cloud.firestore_v1.base_collection._auto_id",
+ return_value="zorpzorpthreezorp012",
+ )
+ def test_document_factory_auto_id(self, mock_auto_id):
+ from google.cloud.firestore_v1.document import DocumentReference
+
+ collection_name = "space-town"
+ client = _make_client()
+ collection = self._make_one(collection_name, client=client)
+
+ child = collection.document()
+ self.assertIsInstance(child, DocumentReference)
+ self.assertIs(child._client, client)
+ self.assertEqual(child._path, (collection_name, mock_auto_id.return_value))
+
+ mock_auto_id.assert_called_once_with()
+
+ def test__parent_info_top_level(self):
+ client = _make_client()
+ collection_id = "soap"
+ collection = self._make_one(collection_id, client=client)
+
+ parent_path, expected_prefix = collection._parent_info()
+
+ expected_path = "projects/{}/databases/{}/documents".format(
+ client.project, client._database
+ )
+ self.assertEqual(parent_path, expected_path)
+ prefix = "{}/{}".format(expected_path, collection_id)
+ self.assertEqual(expected_prefix, prefix)
+
+ def test__parent_info_nested(self):
+ collection_id1 = "bar"
+ document_id = "baz"
+ collection_id2 = "chunk"
+ client = _make_client()
+ collection = self._make_one(
+ collection_id1, document_id, collection_id2, client=client
+ )
+
+ parent_path, expected_prefix = collection._parent_info()
+
+ expected_path = "projects/{}/databases/{}/documents/{}/{}".format(
+ client.project, client._database, collection_id1, document_id
+ )
+ self.assertEqual(parent_path, expected_path)
+ prefix = "{}/{}".format(expected_path, collection_id2)
+ self.assertEqual(expected_prefix, prefix)
+
+ @mock.patch("google.cloud.firestore_v1.base_query.BaseQuery", autospec=True)
+ def test_select(self, mock_query):
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ with mock.patch.object(BaseCollectionReference, "_query") as _query:
+ _query.return_value = mock_query
+
+ collection = self._make_one("collection")
+ field_paths = ["a", "b"]
+ query = collection.select(field_paths)
+
+ mock_query.select.assert_called_once_with(field_paths)
+ self.assertEqual(query, mock_query.select.return_value)
+
+ @mock.patch("google.cloud.firestore_v1.base_query.BaseQuery", autospec=True)
+ def test_where(self, mock_query):
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ with mock.patch.object(BaseCollectionReference, "_query") as _query:
+ _query.return_value = mock_query
+
+ collection = self._make_one("collection")
+ field_path = "foo"
+ op_string = "=="
+ value = 45
+ query = collection.where(field_path, op_string, value)
+
+ mock_query.where.assert_called_once_with(field_path, op_string, value)
+ self.assertEqual(query, mock_query.where.return_value)
+
+ @mock.patch("google.cloud.firestore_v1.base_query.BaseQuery", autospec=True)
+ def test_order_by(self, mock_query):
+ from google.cloud.firestore_v1.base_query import BaseQuery
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ with mock.patch.object(BaseCollectionReference, "_query") as _query:
+ _query.return_value = mock_query
+
+ collection = self._make_one("collection")
+ field_path = "foo"
+ direction = BaseQuery.DESCENDING
+ query = collection.order_by(field_path, direction=direction)
+
+ mock_query.order_by.assert_called_once_with(field_path, direction=direction)
+ self.assertEqual(query, mock_query.order_by.return_value)
+
+ @mock.patch("google.cloud.firestore_v1.base_query.BaseQuery", autospec=True)
+ def test_limit(self, mock_query):
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ with mock.patch.object(BaseCollectionReference, "_query") as _query:
+ _query.return_value = mock_query
+
+ collection = self._make_one("collection")
+ limit = 15
+ query = collection.limit(limit)
+
+ mock_query.limit.assert_called_once_with(limit)
+ self.assertEqual(query, mock_query.limit.return_value)
+
+ @mock.patch("google.cloud.firestore_v1.base_query.BaseQuery", autospec=True)
+ def test_limit_to_last(self, mock_query):
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ with mock.patch.object(BaseCollectionReference, "_query") as _query:
+ _query.return_value = mock_query
+
+ collection = self._make_one("collection")
+ limit = 15
+ query = collection.limit_to_last(limit)
+
+ mock_query.limit_to_last.assert_called_once_with(limit)
+ self.assertEqual(query, mock_query.limit_to_last.return_value)
+
+ @mock.patch("google.cloud.firestore_v1.base_query.BaseQuery", autospec=True)
+ def test_offset(self, mock_query):
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ with mock.patch.object(BaseCollectionReference, "_query") as _query:
+ _query.return_value = mock_query
+
+ collection = self._make_one("collection")
+ offset = 113
+ query = collection.offset(offset)
+
+ mock_query.offset.assert_called_once_with(offset)
+ self.assertEqual(query, mock_query.offset.return_value)
+
+ @mock.patch("google.cloud.firestore_v1.base_query.BaseQuery", autospec=True)
+ def test_start_at(self, mock_query):
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ with mock.patch.object(BaseCollectionReference, "_query") as _query:
+ _query.return_value = mock_query
+
+ collection = self._make_one("collection")
+ doc_fields = {"a": "b"}
+ query = collection.start_at(doc_fields)
+
+ mock_query.start_at.assert_called_once_with(doc_fields)
+ self.assertEqual(query, mock_query.start_at.return_value)
+
+ @mock.patch("google.cloud.firestore_v1.base_query.BaseQuery", autospec=True)
+ def test_start_after(self, mock_query):
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ with mock.patch.object(BaseCollectionReference, "_query") as _query:
+ _query.return_value = mock_query
+
+ collection = self._make_one("collection")
+ doc_fields = {"d": "foo", "e": 10}
+ query = collection.start_after(doc_fields)
+
+ mock_query.start_after.assert_called_once_with(doc_fields)
+ self.assertEqual(query, mock_query.start_after.return_value)
+
+ @mock.patch("google.cloud.firestore_v1.base_query.BaseQuery", autospec=True)
+ def test_end_before(self, mock_query):
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ with mock.patch.object(BaseCollectionReference, "_query") as _query:
+ _query.return_value = mock_query
+
+ collection = self._make_one("collection")
+ doc_fields = {"bar": 10.5}
+ query = collection.end_before(doc_fields)
+
+ mock_query.end_before.assert_called_once_with(doc_fields)
+ self.assertEqual(query, mock_query.end_before.return_value)
+
+ @mock.patch("google.cloud.firestore_v1.base_query.BaseQuery", autospec=True)
+ def test_end_at(self, mock_query):
+ from google.cloud.firestore_v1.base_collection import BaseCollectionReference
+
+ with mock.patch.object(BaseCollectionReference, "_query") as _query:
+ _query.return_value = mock_query
+
+ collection = self._make_one("collection")
+ doc_fields = {"opportunity": True, "reason": 9}
+ query = collection.end_at(doc_fields)
+
+ mock_query.end_at.assert_called_once_with(doc_fields)
+ self.assertEqual(query, mock_query.end_at.return_value)
+
+
+class Test__auto_id(unittest.TestCase):
+ @staticmethod
+ def _call_fut():
+ from google.cloud.firestore_v1.base_collection import _auto_id
+
+ return _auto_id()
+
+ @mock.patch("random.choice")
+ def test_it(self, mock_rand_choice):
+ from google.cloud.firestore_v1.base_collection import _AUTO_ID_CHARS
+
+ mock_result = "0123456789abcdefghij"
+ mock_rand_choice.side_effect = list(mock_result)
+ result = self._call_fut()
+ self.assertEqual(result, mock_result)
+
+ mock_calls = [mock.call(_AUTO_ID_CHARS)] * 20
+ self.assertEqual(mock_rand_choice.mock_calls, mock_calls)
+
+
+def _make_credentials():
+ import google.auth.credentials
+
+ return mock.Mock(spec=google.auth.credentials.Credentials)
+
+
+def _make_client():
+ from google.cloud.firestore_v1.client import Client
+
+ credentials = _make_credentials()
+ return Client(project="project-project", credentials=credentials)
diff --git a/tests/unit/v1/test_base_document.py b/tests/unit/v1/test_base_document.py
new file mode 100644
index 0000000000..bba47a9848
--- /dev/null
+++ b/tests/unit/v1/test_base_document.py
@@ -0,0 +1,435 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+import mock
+from proto.datetime_helpers import DatetimeWithNanoseconds
+from google.protobuf import timestamp_pb2
+
+
class TestBaseDocumentReference(unittest.TestCase):
    """Unit tests for ``DocumentReference``.

    Covers construction/path validation, copy semantics, equality /
    inequality / hashing, the cached ``_document_path`` property, and the
    ``parent`` / ``collection`` factories.
    """

    @staticmethod
    def _get_target_class():
        from google.cloud.firestore_v1.document import DocumentReference

        return DocumentReference

    def _make_one(self, *args, **kwargs):
        # Instantiate the class under test without a module-scope import.
        klass = self._get_target_class()
        return klass(*args, **kwargs)

    def test_constructor(self):
        collection_id1 = "users"
        document_id1 = "alovelace"
        collection_id2 = "platform"
        document_id2 = "*nix"
        client = mock.MagicMock()
        client.__hash__.return_value = 1234

        document = self._make_one(
            collection_id1, document_id1, collection_id2, document_id2, client=client
        )
        self.assertIs(document._client, client)
        expected_path = "/".join(
            (collection_id1, document_id1, collection_id2, document_id2)
        )
        self.assertEqual(document.path, expected_path)

    def test_constructor_invalid_path_empty(self):
        # No path elements at all is rejected.
        with self.assertRaises(ValueError):
            self._make_one()

    def test_constructor_invalid_path_bad_collection_id(self):
        # ``None`` is not a valid collection ID.
        with self.assertRaises(ValueError):
            self._make_one(None, "before", "bad-collection-id", "fifteen")

    def test_constructor_invalid_path_bad_document_id(self):
        # ``None`` is not a valid document ID.
        with self.assertRaises(ValueError):
            self._make_one("bad-document-ID", None)

    def test_constructor_invalid_path_bad_number_args(self):
        # Three path elements (an odd count) should be rejected.
        with self.assertRaises(ValueError):
            self._make_one("Just", "A-Collection", "Sub")

    def test_constructor_invalid_kwarg(self):
        # Only ``client`` is accepted as a keyword argument.
        with self.assertRaises(TypeError):
            self._make_one("Coh-lek-shun", "Dahk-yu-mehnt", burger=18.75)

    def test___copy__(self):
        client = _make_client("rain")
        document = self._make_one("a", "b", client=client)
        # Access the document path so it is copied.
        doc_path = document._document_path
        self.assertEqual(doc_path, document._document_path_internal)

        new_document = document.__copy__()
        self.assertIsNot(new_document, document)
        self.assertIs(new_document._client, document._client)
        self.assertEqual(new_document._path, document._path)
        self.assertEqual(
            new_document._document_path_internal, document._document_path_internal
        )

    def test___deepcopy__calls_copy(self):
        # ``__deepcopy__`` should simply defer to ``__copy__``.
        client = mock.sentinel.client
        document = self._make_one("a", "b", client=client)
        document.__copy__ = mock.Mock(return_value=mock.sentinel.new_doc, spec=[])

        unused_memo = {}
        new_document = document.__deepcopy__(unused_memo)
        self.assertIs(new_document, mock.sentinel.new_doc)
        document.__copy__.assert_called_once_with()

    def test__eq__same_type(self):
        document1 = self._make_one("X", "YY", client=mock.sentinel.client)
        document2 = self._make_one("X", "ZZ", client=mock.sentinel.client)
        document3 = self._make_one("X", "YY", client=mock.sentinel.client2)
        document4 = self._make_one("X", "YY", client=mock.sentinel.client)

        pairs = ((document1, document2), (document1, document3), (document2, document3))
        for candidate1, candidate2 in pairs:
            # We use == explicitly since assertNotEqual would use !=.
            equality_val = candidate1 == candidate2
            self.assertFalse(equality_val)

        # Check the only equal one.
        self.assertEqual(document1, document4)
        self.assertIsNot(document1, document4)

    def test__eq__other_type(self):
        document = self._make_one("X", "YY", client=mock.sentinel.client)
        other = object()
        equality_val = document == other
        self.assertFalse(equality_val)
        # Cross-type comparison returns NotImplemented, not False.
        self.assertIs(document.__eq__(other), NotImplemented)

    def test___hash__(self):
        client = mock.MagicMock()
        client.__hash__.return_value = 234566789
        document = self._make_one("X", "YY", client=client)
        # Hash combines the path tuple's hash with the client's hash.
        self.assertEqual(hash(document), hash(("X", "YY")) + hash(client))

    def test__ne__same_type(self):
        document1 = self._make_one("X", "YY", client=mock.sentinel.client)
        document2 = self._make_one("X", "ZZ", client=mock.sentinel.client)
        document3 = self._make_one("X", "YY", client=mock.sentinel.client2)
        document4 = self._make_one("X", "YY", client=mock.sentinel.client)

        self.assertNotEqual(document1, document2)
        self.assertNotEqual(document1, document3)
        self.assertNotEqual(document2, document3)

        # We use != explicitly since assertEqual would use ==.
        inequality_val = document1 != document4
        self.assertFalse(inequality_val)
        self.assertIsNot(document1, document4)

    def test__ne__other_type(self):
        document = self._make_one("X", "YY", client=mock.sentinel.client)
        other = object()
        self.assertNotEqual(document, other)
        # Cross-type comparison returns NotImplemented, not True.
        self.assertIs(document.__ne__(other), NotImplemented)

    def test__document_path_property(self):
        project = "hi-its-me-ok-bye"
        client = _make_client(project=project)

        collection_id = "then"
        document_id = "090909iii"
        document = self._make_one(collection_id, document_id, client=client)
        doc_path = document._document_path
        expected = "projects/{}/databases/{}/documents/{}/{}".format(
            project, client._database, collection_id, document_id
        )
        self.assertEqual(doc_path, expected)
        self.assertIs(document._document_path_internal, doc_path)

        # Make sure value is cached.
        document._document_path_internal = mock.sentinel.cached
        self.assertIs(document._document_path, mock.sentinel.cached)

    def test__document_path_property_no_client(self):
        # Without a client the full resource path cannot be computed.
        document = self._make_one("hi", "bye")
        self.assertIsNone(document._client)
        with self.assertRaises(ValueError):
            getattr(document, "_document_path")

        self.assertIsNone(document._document_path_internal)

    def test_id_property(self):
        document_id = "867-5309"
        document = self._make_one("Co-lek-shun", document_id)
        self.assertEqual(document.id, document_id)

    def test_parent_property(self):
        from google.cloud.firestore_v1.collection import CollectionReference

        collection_id = "grocery-store"
        document_id = "market"
        client = _make_client()
        document = self._make_one(collection_id, document_id, client=client)

        parent = document.parent
        self.assertIsInstance(parent, CollectionReference)
        self.assertIs(parent._client, client)
        self.assertEqual(parent._path, (collection_id,))

    def test_collection_factory(self):
        from google.cloud.firestore_v1.collection import CollectionReference

        collection_id = "grocery-store"
        document_id = "market"
        new_collection = "fruits"
        client = _make_client()
        document = self._make_one(collection_id, document_id, client=client)

        child = document.collection(new_collection)
        self.assertIsInstance(child, CollectionReference)
        self.assertIs(child._client, client)
        self.assertEqual(child._path, (collection_id, document_id, new_collection))
+
+
class TestDocumentSnapshot(unittest.TestCase):
    """Unit tests for ``DocumentSnapshot``.

    Covers construction, equality, hashing, the convenience properties,
    and the ``get`` / ``to_dict`` accessors (including their copy
    semantics and non-existent-document behavior).
    """

    @staticmethod
    def _get_target_class():
        from google.cloud.firestore_v1.document import DocumentSnapshot

        return DocumentSnapshot

    def _make_one(self, *args, **kwargs):
        # Instantiate the class under test without a module-scope import.
        klass = self._get_target_class()
        return klass(*args, **kwargs)

    def _make_reference(self, *args, **kwargs):
        from google.cloud.firestore_v1.document import DocumentReference

        return DocumentReference(*args, **kwargs)

    def _make_w_ref(self, ref_path=("a", "b"), data=None, exists=True):
        """Build a snapshot whose reference is derived from ``ref_path``.

        ``data=None`` means "empty data"; a ``None`` sentinel is used here
        instead of a mutable ``data={}`` default, which would be shared
        across calls (classic Python pitfall).
        """
        if data is None:
            data = {}
        client = mock.sentinel.client
        reference = self._make_reference(*ref_path, client=client)
        return self._make_one(
            reference,
            data,
            exists,
            mock.sentinel.read_time,
            mock.sentinel.create_time,
            mock.sentinel.update_time,
        )

    def test_constructor(self):
        client = mock.sentinel.client
        reference = self._make_reference("hi", "bye", client=client)
        data = {"zoop": 83}
        snapshot = self._make_one(
            reference,
            data,
            True,
            mock.sentinel.read_time,
            mock.sentinel.create_time,
            mock.sentinel.update_time,
        )
        self.assertIs(snapshot._reference, reference)
        self.assertEqual(snapshot._data, data)
        self.assertIsNot(snapshot._data, data)  # Make sure copied.
        self.assertTrue(snapshot._exists)
        self.assertIs(snapshot.read_time, mock.sentinel.read_time)
        self.assertIs(snapshot.create_time, mock.sentinel.create_time)
        self.assertIs(snapshot.update_time, mock.sentinel.update_time)

    def test___eq___other_type(self):
        snapshot = self._make_w_ref()
        other = object()
        self.assertFalse(snapshot == other)

    def test___eq___different_reference_same_data(self):
        snapshot = self._make_w_ref(("a", "b"))
        other = self._make_w_ref(("c", "d"))
        self.assertFalse(snapshot == other)

    def test___eq___same_reference_different_data(self):
        snapshot = self._make_w_ref(("a", "b"))
        other = self._make_w_ref(("a", "b"), {"foo": "bar"})
        self.assertFalse(snapshot == other)

    def test___eq___same_reference_same_data(self):
        # Equality requires both the reference and the data to match.
        snapshot = self._make_w_ref(("a", "b"), {"foo": "bar"})
        other = self._make_w_ref(("a", "b"), {"foo": "bar"})
        self.assertTrue(snapshot == other)

    def test___hash__(self):
        client = mock.MagicMock()
        client.__hash__.return_value = 234566789
        reference = self._make_reference("hi", "bye", client=client)
        data = {"zoop": 83}
        update_time = DatetimeWithNanoseconds.from_timestamp_pb(
            timestamp_pb2.Timestamp(seconds=123456, nanos=123456789)
        )
        snapshot = self._make_one(
            reference, data, True, None, mock.sentinel.create_time, update_time
        )
        # Hash combines the reference hash with the update time's
        # seconds and nanos components.
        self.assertEqual(
            hash(snapshot), hash(reference) + hash(123456) + hash(123456789)
        )

    def test__client_property(self):
        reference = self._make_reference(
            "ok", "fine", "now", "fore", client=mock.sentinel.client
        )
        snapshot = self._make_one(reference, {}, False, None, None, None)
        self.assertIs(snapshot._client, mock.sentinel.client)

    def test_exists_property(self):
        reference = mock.sentinel.reference

        snapshot1 = self._make_one(reference, {}, False, None, None, None)
        self.assertFalse(snapshot1.exists)
        snapshot2 = self._make_one(reference, {}, True, None, None, None)
        self.assertTrue(snapshot2.exists)

    def test_id_property(self):
        # The snapshot's ID mirrors its reference's ID.
        document_id = "around"
        reference = self._make_reference(
            "look", document_id, client=mock.sentinel.client
        )
        snapshot = self._make_one(reference, {}, True, None, None, None)
        self.assertEqual(snapshot.id, document_id)
        self.assertEqual(reference.id, document_id)

    def test_reference_property(self):
        snapshot = self._make_one(mock.sentinel.reference, {}, True, None, None, None)
        self.assertIs(snapshot.reference, mock.sentinel.reference)

    def test_get(self):
        data = {"one": {"bold": "move"}}
        snapshot = self._make_one(None, data, True, None, None, None)

        # Each read returns an equal but distinct copy of the value.
        first_read = snapshot.get("one")
        second_read = snapshot.get("one")
        self.assertEqual(first_read, data.get("one"))
        self.assertIsNot(first_read, data.get("one"))
        self.assertEqual(first_read, second_read)
        self.assertIsNot(first_read, second_read)

        with self.assertRaises(KeyError):
            snapshot.get("two")

    def test_nonexistent_snapshot(self):
        # ``get`` on a non-existent document yields ``None``, not KeyError.
        snapshot = self._make_one(None, None, False, None, None, None)
        self.assertIsNone(snapshot.get("one"))

    def test_to_dict(self):
        data = {"a": 10, "b": ["definitely", "mutable"], "c": {"45": 50}}
        snapshot = self._make_one(None, data, True, None, None, None)
        as_dict = snapshot.to_dict()
        self.assertEqual(as_dict, data)
        self.assertIsNot(as_dict, data)
        # Check that the data remains unchanged.
        as_dict["b"].append("hi")
        self.assertEqual(data, snapshot.to_dict())
        self.assertNotEqual(data, as_dict)

    def test_non_existent(self):
        # ``to_dict`` on a non-existent document yields ``None``.
        snapshot = self._make_one(None, None, False, None, None, None)
        as_dict = snapshot.to_dict()
        self.assertIsNone(as_dict)
+
+
class Test__get_document_path(unittest.TestCase):
    """Unit tests for the ``_get_document_path`` helper."""

    @staticmethod
    def _call_fut(client, path):
        from google.cloud.firestore_v1.base_document import _get_document_path

        return _get_document_path(client, path)

    def test_it(self):
        project = "prah-jekt"
        client = _make_client(project=project)
        path = ("Some", "Document", "Child", "Shockument")

        # Full resource name: project + database prefix, then the
        # slash-joined document path.
        expected = "projects/{}/databases/{}/documents/{}".format(
            project, client._database, "/".join(path)
        )
        self.assertEqual(self._call_fut(client, path), expected)
+
+
class Test__consume_single_get(unittest.TestCase):
    """Unit tests for the ``_consume_single_get`` helper."""

    @staticmethod
    def _call_fut(response_iterator):
        from google.cloud.firestore_v1.base_document import _consume_single_get

        return _consume_single_get(response_iterator)

    def test_success(self):
        # Exactly one response: that response is returned unchanged.
        self.assertIs(
            self._call_fut(iter([mock.sentinel.result])), mock.sentinel.result
        )

    def test_failure_not_enough(self):
        # An empty stream is an error.
        with self.assertRaises(ValueError):
            self._call_fut(iter([]))

    def test_failure_too_many(self):
        # More than one response is also an error.
        with self.assertRaises(ValueError):
            self._call_fut(iter([None, None]))
+
+
class Test__first_write_result(unittest.TestCase):
    """Unit tests for the ``_first_write_result`` helper."""

    @staticmethod
    def _call_fut(write_results):
        from google.cloud.firestore_v1.base_document import _first_write_result

        return _first_write_result(write_results)

    def test_success(self):
        from google.protobuf import timestamp_pb2
        from google.cloud.firestore_v1.types import write

        timestamp = timestamp_pb2.Timestamp(seconds=1368767504, nanos=458000123)
        single_result = write.WriteResult(update_time=timestamp)

        self.assertIs(self._call_fut([single_result]), single_result)

    def test_failure_not_enough(self):
        # An empty result list is an error.
        with self.assertRaises(ValueError):
            self._call_fut([])

    def test_more_than_one(self):
        from google.cloud.firestore_v1.types import write

        # Extra results are tolerated; only the first one is returned.
        result1 = write.WriteResult()
        result2 = write.WriteResult()

        self.assertIs(self._call_fut([result1, result2]), result1)
+
+
def _make_credentials():
    """Return a mock satisfying the ``Credentials`` interface."""
    from google.auth.credentials import Credentials

    return mock.Mock(spec=Credentials)
+
+
def _make_client(project="project-project"):
    """Build a ``Client`` for *project* backed by mock credentials."""
    from google.cloud.firestore_v1.client import Client

    return Client(project=project, credentials=_make_credentials())
diff --git a/tests/unit/v1beta1/test_query.py b/tests/unit/v1/test_base_query.py
similarity index 62%
rename from tests/unit/v1beta1/test_query.py
rename to tests/unit/v1/test_base_query.py
index 455a56b7f7..4b22f6cd80 100644
--- a/tests/unit/v1beta1/test_query.py
+++ b/tests/unit/v1/test_base_query.py
@@ -13,22 +13,15 @@
# limitations under the License.
import datetime
-import types
import unittest
import mock
-import pytest
-import six
-class TestQuery(unittest.TestCase):
-
- if six.PY2:
- assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
-
+class TestBaseQuery(unittest.TestCase):
@staticmethod
def _get_target_class():
- from google.cloud.firestore_v1beta1.query import Query
+ from google.cloud.firestore_v1.query import Query
return Query
@@ -46,8 +39,11 @@ def test_constructor_defaults(self):
self.assertIsNone(query._offset)
self.assertIsNone(query._start_at)
self.assertIsNone(query._end_at)
+ self.assertFalse(query._all_descendants)
- def _make_one_all_fields(self, limit=9876, offset=12, skip_fields=(), parent=None):
+ def _make_one_all_fields(
+ self, limit=9876, offset=12, skip_fields=(), parent=None, all_descendants=True
+ ):
kwargs = {
"projection": mock.sentinel.projection,
"field_filters": mock.sentinel.filters,
@@ -56,6 +52,7 @@ def _make_one_all_fields(self, limit=9876, offset=12, skip_fields=(), parent=Non
"offset": offset,
"start_at": mock.sentinel.start_at,
"end_at": mock.sentinel.end_at,
+ "all_descendants": all_descendants,
}
for field in skip_fields:
kwargs.pop(field)
@@ -75,6 +72,7 @@ def test_constructor_explicit(self):
self.assertEqual(query._offset, offset)
self.assertIs(query._start_at, mock.sentinel.start_at)
self.assertIs(query._end_at, mock.sentinel.end_at)
+ self.assertTrue(query._all_descendants)
def test__client_property(self):
parent = mock.Mock(_client=mock.sentinel.client, spec=["_client"])
@@ -82,75 +80,79 @@ def test__client_property(self):
self.assertIs(query._client, mock.sentinel.client)
def test___eq___other_type(self):
- client = self._make_one_all_fields()
+ query = self._make_one_all_fields()
other = object()
- self.assertFalse(client == other)
+ self.assertFalse(query == other)
def test___eq___different_parent(self):
parent = mock.sentinel.parent
other_parent = mock.sentinel.other_parent
- client = self._make_one_all_fields(parent=parent)
+ query = self._make_one_all_fields(parent=parent)
other = self._make_one_all_fields(parent=other_parent)
- self.assertFalse(client == other)
+ self.assertFalse(query == other)
def test___eq___different_projection(self):
parent = mock.sentinel.parent
- client = self._make_one_all_fields(parent=parent, skip_fields=("projection",))
- client._projection = mock.sentinel.projection
+ query = self._make_one_all_fields(parent=parent, skip_fields=("projection",))
+ query._projection = mock.sentinel.projection
other = self._make_one_all_fields(parent=parent, skip_fields=("projection",))
other._projection = mock.sentinel.other_projection
- self.assertFalse(client == other)
+ self.assertFalse(query == other)
def test___eq___different_field_filters(self):
parent = mock.sentinel.parent
- client = self._make_one_all_fields(
- parent=parent, skip_fields=("field_filters",)
- )
- client._field_filters = mock.sentinel.field_filters
+ query = self._make_one_all_fields(parent=parent, skip_fields=("field_filters",))
+ query._field_filters = mock.sentinel.field_filters
other = self._make_one_all_fields(parent=parent, skip_fields=("field_filters",))
other._field_filters = mock.sentinel.other_field_filters
- self.assertFalse(client == other)
+ self.assertFalse(query == other)
def test___eq___different_orders(self):
parent = mock.sentinel.parent
- client = self._make_one_all_fields(parent=parent, skip_fields=("orders",))
- client._orders = mock.sentinel.orders
+ query = self._make_one_all_fields(parent=parent, skip_fields=("orders",))
+ query._orders = mock.sentinel.orders
other = self._make_one_all_fields(parent=parent, skip_fields=("orders",))
other._orders = mock.sentinel.other_orders
- self.assertFalse(client == other)
+ self.assertFalse(query == other)
def test___eq___different_limit(self):
parent = mock.sentinel.parent
- client = self._make_one_all_fields(parent=parent, limit=10)
+ query = self._make_one_all_fields(parent=parent, limit=10)
other = self._make_one_all_fields(parent=parent, limit=20)
- self.assertFalse(client == other)
+ self.assertFalse(query == other)
def test___eq___different_offset(self):
parent = mock.sentinel.parent
- client = self._make_one_all_fields(parent=parent, offset=10)
+ query = self._make_one_all_fields(parent=parent, offset=10)
other = self._make_one_all_fields(parent=parent, offset=20)
- self.assertFalse(client == other)
+ self.assertFalse(query == other)
def test___eq___different_start_at(self):
parent = mock.sentinel.parent
- client = self._make_one_all_fields(parent=parent, skip_fields=("start_at",))
- client._start_at = mock.sentinel.start_at
+ query = self._make_one_all_fields(parent=parent, skip_fields=("start_at",))
+ query._start_at = mock.sentinel.start_at
other = self._make_one_all_fields(parent=parent, skip_fields=("start_at",))
other._start_at = mock.sentinel.other_start_at
- self.assertFalse(client == other)
+ self.assertFalse(query == other)
def test___eq___different_end_at(self):
parent = mock.sentinel.parent
- client = self._make_one_all_fields(parent=parent, skip_fields=("end_at",))
- client._end_at = mock.sentinel.end_at
+ query = self._make_one_all_fields(parent=parent, skip_fields=("end_at",))
+ query._end_at = mock.sentinel.end_at
other = self._make_one_all_fields(parent=parent, skip_fields=("end_at",))
other._end_at = mock.sentinel.other_end_at
- self.assertFalse(client == other)
+ self.assertFalse(query == other)
+
+ def test___eq___different_all_descendants(self):
+ parent = mock.sentinel.parent
+ query = self._make_one_all_fields(parent=parent, all_descendants=True)
+ other = self._make_one_all_fields(parent=parent, all_descendants=False)
+ self.assertFalse(query == other)
def test___eq___hit(self):
- client = self._make_one_all_fields()
+ query = self._make_one_all_fields()
other = self._make_one_all_fields()
- self.assertTrue(client == other)
+ self.assertTrue(query == other)
def _compare_queries(self, query1, query2, attr_name):
attrs1 = query1.__dict__.copy()
@@ -166,11 +168,11 @@ def _compare_queries(self, query1, query2, attr_name):
@staticmethod
def _make_projection_for_select(field_paths):
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import query
- return query_pb2.StructuredQuery.Projection(
+ return query.StructuredQuery.Projection(
fields=[
- query_pb2.StructuredQuery.FieldReference(field_path=field_path)
+ query.StructuredQuery.FieldReference(field_path=field_path)
for field_path in field_paths
]
)
@@ -182,7 +184,7 @@ def test_select_invalid_path(self):
query.select(["*"])
def test_select(self):
- query1 = self._make_one_all_fields()
+ query1 = self._make_one_all_fields(all_descendants=True)
field_paths2 = ["foo", "bar"]
query2 = query1.select(field_paths2)
@@ -210,49 +212,50 @@ def test_where_invalid_path(self):
query.where("*", "==", 1)
def test_where(self):
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import StructuredQuery
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import query
- query = self._make_one_all_fields(skip_fields=("field_filters",))
- new_query = query.where("power.level", ">", 9000)
+ query_inst = self._make_one_all_fields(
+ skip_fields=("field_filters",), all_descendants=True
+ )
+ new_query = query_inst.where("power.level", ">", 9000)
- self.assertIsNot(query, new_query)
+ self.assertIsNot(query_inst, new_query)
self.assertIsInstance(new_query, self._get_target_class())
self.assertEqual(len(new_query._field_filters), 1)
field_pb = new_query._field_filters[0]
- expected_pb = query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="power.level"),
- op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN,
- value=document_pb2.Value(integer_value=9000),
+ expected_pb = query.StructuredQuery.FieldFilter(
+ field=query.StructuredQuery.FieldReference(field_path="power.level"),
+ op=StructuredQuery.FieldFilter.Operator.GREATER_THAN,
+ value=document.Value(integer_value=9000),
)
self.assertEqual(field_pb, expected_pb)
- self._compare_queries(query, new_query, "_field_filters")
+ self._compare_queries(query_inst, new_query, "_field_filters")
def _where_unary_helper(self, value, op_enum, op_string="=="):
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import StructuredQuery
- query = self._make_one_all_fields(skip_fields=("field_filters",))
+ query_inst = self._make_one_all_fields(skip_fields=("field_filters",))
field_path = "feeeld"
- new_query = query.where(field_path, op_string, value)
+ new_query = query_inst.where(field_path, op_string, value)
- self.assertIsNot(query, new_query)
+ self.assertIsNot(query_inst, new_query)
self.assertIsInstance(new_query, self._get_target_class())
self.assertEqual(len(new_query._field_filters), 1)
field_pb = new_query._field_filters[0]
- expected_pb = query_pb2.StructuredQuery.UnaryFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- op=op_enum,
+ expected_pb = StructuredQuery.UnaryFilter(
+ field=StructuredQuery.FieldReference(field_path=field_path), op=op_enum
)
self.assertEqual(field_pb, expected_pb)
- self._compare_queries(query, new_query, "_field_filters")
+ self._compare_queries(query_inst, new_query, "_field_filters")
def test_where_eq_null(self):
- from google.cloud.firestore_v1beta1.gapic import enums
+ from google.cloud.firestore_v1.types import StructuredQuery
- op_enum = enums.StructuredQuery.UnaryFilter.Operator.IS_NULL
+ op_enum = StructuredQuery.UnaryFilter.Operator.IS_NULL
self._where_unary_helper(None, op_enum)
def test_where_gt_null(self):
@@ -260,9 +263,9 @@ def test_where_gt_null(self):
self._where_unary_helper(None, 0, op_string=">")
def test_where_eq_nan(self):
- from google.cloud.firestore_v1beta1.gapic import enums
+ from google.cloud.firestore_v1.types import StructuredQuery
- op_enum = enums.StructuredQuery.UnaryFilter.Operator.IS_NAN
+ op_enum = StructuredQuery.UnaryFilter.Operator.IS_NAN
self._where_unary_helper(float("nan"), op_enum)
def test_where_le_nan(self):
@@ -270,25 +273,25 @@ def test_where_le_nan(self):
self._where_unary_helper(float("nan"), 0, op_string="<=")
def test_where_w_delete(self):
- from google.cloud.firestore_v1beta1 import DELETE_FIELD
+ from google.cloud.firestore_v1 import DELETE_FIELD
with self.assertRaises(ValueError):
self._where_unary_helper(DELETE_FIELD, 0)
def test_where_w_server_timestamp(self):
- from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP
+ from google.cloud.firestore_v1 import SERVER_TIMESTAMP
with self.assertRaises(ValueError):
self._where_unary_helper(SERVER_TIMESTAMP, 0)
def test_where_w_array_remove(self):
- from google.cloud.firestore_v1beta1 import ArrayRemove
+ from google.cloud.firestore_v1 import ArrayRemove
with self.assertRaises(ValueError):
self._where_unary_helper(ArrayRemove([1, 3, 5]), 0)
def test_where_w_array_union(self):
- from google.cloud.firestore_v1beta1 import ArrayUnion
+ from google.cloud.firestore_v1 import ArrayUnion
with self.assertRaises(ValueError):
self._where_unary_helper(ArrayUnion([2, 4, 8]), 0)
@@ -300,19 +303,19 @@ def test_order_by_invalid_path(self):
query.order_by("*")
def test_order_by(self):
- from google.cloud.firestore_v1beta1.gapic import enums
+ from google.cloud.firestore_v1.types import StructuredQuery
klass = self._get_target_class()
- query1 = self._make_one_all_fields(skip_fields=("orders",))
+ query1 = self._make_one_all_fields(
+ skip_fields=("orders",), all_descendants=True
+ )
field_path2 = "a"
query2 = query1.order_by(field_path2)
self.assertIsNot(query2, query1)
self.assertIsInstance(query2, klass)
- order_pb2 = _make_order_pb(
- field_path2, enums.StructuredQuery.Direction.ASCENDING
- )
- self.assertEqual(query2._orders, (order_pb2,))
+ order = _make_order_pb(field_path2, StructuredQuery.Direction.ASCENDING)
+ self.assertEqual(query2._orders, (order,))
self._compare_queries(query1, query2, "_orders")
# Make sure it appends to the orders.
@@ -320,14 +323,12 @@ def test_order_by(self):
query3 = query2.order_by(field_path3, direction=klass.DESCENDING)
self.assertIsNot(query3, query2)
self.assertIsInstance(query3, klass)
- order_pb3 = _make_order_pb(
- field_path3, enums.StructuredQuery.Direction.DESCENDING
- )
- self.assertEqual(query3._orders, (order_pb2, order_pb3))
+ order_pb3 = _make_order_pb(field_path3, StructuredQuery.Direction.DESCENDING)
+ self.assertEqual(query3._orders, (order, order_pb3))
self._compare_queries(query2, query3, "_orders")
def test_limit(self):
- query1 = self._make_one_all_fields()
+ query1 = self._make_one_all_fields(all_descendants=True)
limit2 = 100
query2 = query1.limit(limit2)
@@ -345,7 +346,7 @@ def test_limit(self):
self._compare_queries(query2, query3, "_limit")
def test_offset(self):
- query1 = self._make_one_all_fields()
+ query1 = self._make_one_all_fields(all_descendants=True)
offset2 = 23
query2 = query1.offset(offset2)
@@ -364,25 +365,26 @@ def test_offset(self):
@staticmethod
def _make_collection(*path, **kw):
- from google.cloud.firestore_v1beta1 import collection
+ from google.cloud.firestore_v1 import collection
return collection.CollectionReference(*path, **kw)
@staticmethod
def _make_docref(*path, **kw):
- from google.cloud.firestore_v1beta1 import document
+ from google.cloud.firestore_v1 import document
return document.DocumentReference(*path, **kw)
@staticmethod
def _make_snapshot(docref, values):
- from google.cloud.firestore_v1beta1 import document
+ from google.cloud.firestore_v1 import document
return document.DocumentSnapshot(docref, values, True, None, None, None)
def test__cursor_helper_w_dict(self):
values = {"a": 7, "b": "foo"}
query1 = self._make_one(mock.sentinel.parent)
+ query1._all_descendants = True
query2 = query1._cursor_helper(values, True, True)
self.assertIs(query2._parent, mock.sentinel.parent)
@@ -392,6 +394,7 @@ def test__cursor_helper_w_dict(self):
self.assertIsNone(query2._limit)
self.assertIsNone(query2._offset)
self.assertIsNone(query2._end_at)
+ self.assertTrue(query2._all_descendants)
cursor, before = query2._start_at
@@ -445,6 +448,28 @@ def test__cursor_helper_w_snapshot_wrong_collection(self):
with self.assertRaises(ValueError):
query._cursor_helper(snapshot, False, False)
+ def test__cursor_helper_w_snapshot_other_collection_all_descendants(self):
+ values = {"a": 7, "b": "foo"}
+ docref = self._make_docref("there", "doc_id")
+ snapshot = self._make_snapshot(docref, values)
+ collection = self._make_collection("here")
+ query1 = self._make_one(collection, all_descendants=True)
+
+ query2 = query1._cursor_helper(snapshot, False, False)
+
+ self.assertIs(query2._parent, collection)
+ self.assertIsNone(query2._projection)
+ self.assertEqual(query2._field_filters, ())
+ self.assertEqual(query2._orders, ())
+ self.assertIsNone(query2._limit)
+ self.assertIsNone(query2._offset)
+ self.assertIsNone(query2._start_at)
+
+ cursor, before = query2._end_at
+
+ self.assertIs(cursor, snapshot)
+ self.assertFalse(before)
+
def test__cursor_helper_w_snapshot(self):
values = {"a": 7, "b": "foo"}
docref = self._make_docref("here", "doc_id")
@@ -469,7 +494,9 @@ def test__cursor_helper_w_snapshot(self):
def test_start_at(self):
collection = self._make_collection("here")
- query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",))
+ query1 = self._make_one_all_fields(
+ parent=collection, skip_fields=("orders",), all_descendants=True
+ )
query2 = query1.order_by("hi")
document_fields3 = {"hi": "mom"}
@@ -566,53 +593,55 @@ def test__filters_pb_empty(self):
self.assertIsNone(query._filters_pb())
def test__filters_pb_single(self):
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import StructuredQuery
+
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import query
query1 = self._make_one(mock.sentinel.parent)
query2 = query1.where("x.y", ">", 50.5)
filter_pb = query2._filters_pb()
- expected_pb = query_pb2.StructuredQuery.Filter(
- field_filter=query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="x.y"),
- op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN,
- value=document_pb2.Value(double_value=50.5),
+ expected_pb = query.StructuredQuery.Filter(
+ field_filter=query.StructuredQuery.FieldFilter(
+ field=query.StructuredQuery.FieldReference(field_path="x.y"),
+ op=StructuredQuery.FieldFilter.Operator.GREATER_THAN,
+ value=document.Value(double_value=50.5),
)
)
self.assertEqual(filter_pb, expected_pb)
def test__filters_pb_multi(self):
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import StructuredQuery
+
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import query
query1 = self._make_one(mock.sentinel.parent)
query2 = query1.where("x.y", ">", 50.5)
query3 = query2.where("ABC", "==", 123)
filter_pb = query3._filters_pb()
- op_class = enums.StructuredQuery.FieldFilter.Operator
- expected_pb = query_pb2.StructuredQuery.Filter(
- composite_filter=query_pb2.StructuredQuery.CompositeFilter(
- op=enums.StructuredQuery.CompositeFilter.Operator.AND,
+ op_class = StructuredQuery.FieldFilter.Operator
+ expected_pb = query.StructuredQuery.Filter(
+ composite_filter=query.StructuredQuery.CompositeFilter(
+ op=StructuredQuery.CompositeFilter.Operator.AND,
filters=[
- query_pb2.StructuredQuery.Filter(
- field_filter=query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(
+ query.StructuredQuery.Filter(
+ field_filter=query.StructuredQuery.FieldFilter(
+ field=query.StructuredQuery.FieldReference(
field_path="x.y"
),
op=op_class.GREATER_THAN,
- value=document_pb2.Value(double_value=50.5),
+ value=document.Value(double_value=50.5),
)
),
- query_pb2.StructuredQuery.Filter(
- field_filter=query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(
+ query.StructuredQuery.Filter(
+ field_filter=query.StructuredQuery.FieldFilter(
+ field=query.StructuredQuery.FieldReference(
field_path="ABC"
),
op=op_class.EQUAL,
- value=document_pb2.Value(integer_value=123),
+ value=document.Value(integer_value=123),
)
),
],
@@ -723,7 +752,7 @@ def test__normalize_cursor_as_dict_mismatched_order(self):
query._normalize_cursor(cursor, query._orders)
def test__normalize_cursor_w_delete(self):
- from google.cloud.firestore_v1beta1 import DELETE_FIELD
+ from google.cloud.firestore_v1 import DELETE_FIELD
cursor = ([DELETE_FIELD], True)
query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
@@ -732,7 +761,7 @@ def test__normalize_cursor_w_delete(self):
query._normalize_cursor(cursor, query._orders)
def test__normalize_cursor_w_server_timestamp(self):
- from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP
+ from google.cloud.firestore_v1 import SERVER_TIMESTAMP
cursor = ([SERVER_TIMESTAMP], True)
query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
@@ -741,7 +770,7 @@ def test__normalize_cursor_w_server_timestamp(self):
query._normalize_cursor(cursor, query._orders)
def test__normalize_cursor_w_array_remove(self):
- from google.cloud.firestore_v1beta1 import ArrayRemove
+ from google.cloud.firestore_v1 import ArrayRemove
cursor = ([ArrayRemove([1, 3, 5])], True)
query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
@@ -750,7 +779,7 @@ def test__normalize_cursor_w_array_remove(self):
query._normalize_cursor(cursor, query._orders)
def test__normalize_cursor_w_array_union(self):
- from google.cloud.firestore_v1beta1 import ArrayUnion
+ from google.cloud.firestore_v1 import ArrayUnion
cursor = ([ArrayUnion([2, 4, 8])], True)
query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
@@ -770,6 +799,16 @@ def test__normalize_cursor_as_dict_hit(self):
self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))
+ def test__normalize_cursor_as_dict_with_dot_key_hit(self):
+ cursor = ({"b.a": 1}, True)
+ query = self._make_one(mock.sentinel.parent).order_by("b.a", "ASCENDING")
+ self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))
+
+ def test__normalize_cursor_as_dict_with_inner_data_hit(self):
+ cursor = ({"b": {"a": 1}}, True)
+ query = self._make_one(mock.sentinel.parent).order_by("b.a", "ASCENDING")
+ self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))
+
def test__normalize_cursor_as_snapshot_hit(self):
values = {"b": 1}
docref = self._make_docref("here", "doc_id")
@@ -817,9 +856,10 @@ def test__normalize_cursor_w___name___wo_slash(self):
def test__to_protobuf_all_fields(self):
from google.protobuf import wrappers_pb2
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import StructuredQuery
+
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import query
parent = mock.Mock(id="cat", spec=["id"])
query1 = self._make_one(parent)
@@ -833,37 +873,35 @@ def test__to_protobuf_all_fields(self):
structured_query_pb = query8._to_protobuf()
query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
+ "from_": [
+ query.StructuredQuery.CollectionSelector(collection_id=parent.id)
],
- "select": query_pb2.StructuredQuery.Projection(
+ "select": query.StructuredQuery.Projection(
fields=[
- query_pb2.StructuredQuery.FieldReference(field_path=field_path)
+ query.StructuredQuery.FieldReference(field_path=field_path)
for field_path in ["X", "Y", "Z"]
]
),
- "where": query_pb2.StructuredQuery.Filter(
- field_filter=query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="Y"),
- op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN,
- value=document_pb2.Value(double_value=2.5),
+ "where": query.StructuredQuery.Filter(
+ field_filter=query.StructuredQuery.FieldFilter(
+ field=query.StructuredQuery.FieldReference(field_path="Y"),
+ op=StructuredQuery.FieldFilter.Operator.GREATER_THAN,
+ value=document.Value(double_value=2.5),
)
),
- "order_by": [
- _make_order_pb("X", enums.StructuredQuery.Direction.ASCENDING)
- ],
- "start_at": query_pb2.Cursor(
- values=[document_pb2.Value(integer_value=10)], before=True
+ "order_by": [_make_order_pb("X", StructuredQuery.Direction.ASCENDING)],
+ "start_at": query.Cursor(
+ values=[document.Value(integer_value=10)], before=True
),
- "end_at": query_pb2.Cursor(values=[document_pb2.Value(integer_value=25)]),
+ "end_at": query.Cursor(values=[document.Value(integer_value=25)]),
"offset": 3,
"limit": wrappers_pb2.Int32Value(value=17),
}
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
+ expected_pb = query.StructuredQuery(**query_kwargs)
self.assertEqual(structured_query_pb, expected_pb)
def test__to_protobuf_select_only(self):
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import query
parent = mock.Mock(id="cat", spec=["id"])
query1 = self._make_one(parent)
@@ -872,23 +910,24 @@ def test__to_protobuf_select_only(self):
structured_query_pb = query2._to_protobuf()
query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
+ "from_": [
+ query.StructuredQuery.CollectionSelector(collection_id=parent.id)
],
- "select": query_pb2.StructuredQuery.Projection(
+ "select": query.StructuredQuery.Projection(
fields=[
- query_pb2.StructuredQuery.FieldReference(field_path=field_path)
+ query.StructuredQuery.FieldReference(field_path=field_path)
for field_path in field_paths
]
),
}
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
+ expected_pb = query.StructuredQuery(**query_kwargs)
self.assertEqual(structured_query_pb, expected_pb)
def test__to_protobuf_where_only(self):
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import StructuredQuery
+
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import query
parent = mock.Mock(id="dog", spec=["id"])
query1 = self._make_one(parent)
@@ -896,23 +935,24 @@ def test__to_protobuf_where_only(self):
structured_query_pb = query2._to_protobuf()
query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
+ "from_": [
+ query.StructuredQuery.CollectionSelector(collection_id=parent.id)
],
- "where": query_pb2.StructuredQuery.Filter(
- field_filter=query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="a"),
- op=enums.StructuredQuery.FieldFilter.Operator.EQUAL,
- value=document_pb2.Value(string_value=u"b"),
+ "where": query.StructuredQuery.Filter(
+ field_filter=query.StructuredQuery.FieldFilter(
+ field=query.StructuredQuery.FieldReference(field_path="a"),
+ op=StructuredQuery.FieldFilter.Operator.EQUAL,
+ value=document.Value(string_value=u"b"),
)
),
}
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
+ expected_pb = query.StructuredQuery(**query_kwargs)
self.assertEqual(structured_query_pb, expected_pb)
def test__to_protobuf_order_by_only(self):
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import StructuredQuery
+
+ from google.cloud.firestore_v1.types import query
parent = mock.Mock(id="fish", spec=["id"])
query1 = self._make_one(parent)
@@ -920,64 +960,58 @@ def test__to_protobuf_order_by_only(self):
structured_query_pb = query2._to_protobuf()
query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "order_by": [
- _make_order_pb("abc", enums.StructuredQuery.Direction.ASCENDING)
+ "from_": [
+ query.StructuredQuery.CollectionSelector(collection_id=parent.id)
],
+ "order_by": [_make_order_pb("abc", StructuredQuery.Direction.ASCENDING)],
}
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
+ expected_pb = query.StructuredQuery(**query_kwargs)
self.assertEqual(structured_query_pb, expected_pb)
def test__to_protobuf_start_at_only(self):
# NOTE: "only" is wrong since we must have ``order_by`` as well.
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import StructuredQuery
+
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import query
parent = mock.Mock(id="phish", spec=["id"])
- query = self._make_one(parent).order_by("X.Y").start_after({"X": {"Y": u"Z"}})
+ query_inst = (
+ self._make_one(parent).order_by("X.Y").start_after({"X": {"Y": u"Z"}})
+ )
- structured_query_pb = query._to_protobuf()
+ structured_query_pb = query_inst._to_protobuf()
query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "order_by": [
- _make_order_pb("X.Y", enums.StructuredQuery.Direction.ASCENDING)
- ],
- "start_at": query_pb2.Cursor(
- values=[document_pb2.Value(string_value=u"Z")]
- ),
+ "from_": [StructuredQuery.CollectionSelector(collection_id=parent.id)],
+ "order_by": [_make_order_pb("X.Y", StructuredQuery.Direction.ASCENDING)],
+ "start_at": query.Cursor(values=[document.Value(string_value=u"Z")]),
}
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
+ expected_pb = StructuredQuery(**query_kwargs)
self.assertEqual(structured_query_pb, expected_pb)
def test__to_protobuf_end_at_only(self):
# NOTE: "only" is wrong since we must have ``order_by`` as well.
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import StructuredQuery
+
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import query
parent = mock.Mock(id="ghoti", spec=["id"])
- query = self._make_one(parent).order_by("a").end_at({"a": 88})
+ query_inst = self._make_one(parent).order_by("a").end_at({"a": 88})
- structured_query_pb = query._to_protobuf()
+ structured_query_pb = query_inst._to_protobuf()
query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "order_by": [
- _make_order_pb("a", enums.StructuredQuery.Direction.ASCENDING)
+ "from_": [
+ query.StructuredQuery.CollectionSelector(collection_id=parent.id)
],
- "end_at": query_pb2.Cursor(values=[document_pb2.Value(integer_value=88)]),
+ "order_by": [_make_order_pb("a", StructuredQuery.Direction.ASCENDING)],
+ "end_at": query.Cursor(values=[document.Value(integer_value=88)]),
}
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
+ expected_pb = query.StructuredQuery(**query_kwargs)
self.assertEqual(structured_query_pb, expected_pb)
def test__to_protobuf_offset_only(self):
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import query
parent = mock.Mock(id="cartt", spec=["id"])
query1 = self._make_one(parent)
@@ -986,17 +1020,17 @@ def test__to_protobuf_offset_only(self):
structured_query_pb = query2._to_protobuf()
query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
+ "from_": [
+ query.StructuredQuery.CollectionSelector(collection_id=parent.id)
],
"offset": offset,
}
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
+ expected_pb = query.StructuredQuery(**query_kwargs)
self.assertEqual(structured_query_pb, expected_pb)
def test__to_protobuf_limit_only(self):
from google.protobuf import wrappers_pb2
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import query
parent = mock.Mock(id="donut", spec=["id"])
query1 = self._make_one(parent)
@@ -1005,278 +1039,15 @@ def test__to_protobuf_limit_only(self):
structured_query_pb = query2._to_protobuf()
query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
+ "from_": [
+ query.StructuredQuery.CollectionSelector(collection_id=parent.id)
],
"limit": wrappers_pb2.Int32Value(value=limit),
}
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
+ expected_pb = query.StructuredQuery(**query_kwargs)
self.assertEqual(structured_query_pb, expected_pb)
- def test_get_simple(self):
- import warnings
-
- # Create a minimal fake GAPIC.
- firestore_api = mock.Mock(spec=["run_query"])
-
- # Attach the fake GAPIC to a real client.
- client = _make_client()
- client._firestore_api_internal = firestore_api
-
- # Make a **real** collection reference as parent.
- parent = client.collection("dee")
-
- # Add a dummy response to the minimal fake GAPIC.
- _, expected_prefix = parent._parent_info()
- name = "{}/sleep".format(expected_prefix)
- data = {"snooze": 10}
- response_pb = _make_query_response(name=name, data=data)
- firestore_api.run_query.return_value = iter([response_pb])
-
- # Execute the query and check the response.
- query = self._make_one(parent)
-
- with warnings.catch_warnings(record=True) as warned:
- get_response = query.get()
-
- self.assertIsInstance(get_response, types.GeneratorType)
- returned = list(get_response)
- self.assertEqual(len(returned), 1)
- snapshot = returned[0]
- self.assertEqual(snapshot.reference._path, ("dee", "sleep"))
- self.assertEqual(snapshot.to_dict(), data)
-
- # Verify the mock call.
- parent_path, _ = parent._parent_info()
- firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- # Verify the deprecation
- self.assertEqual(len(warned), 1)
- self.assertIs(warned[0].category, DeprecationWarning)
-
- def test_stream_simple(self):
- # Create a minimal fake GAPIC.
- firestore_api = mock.Mock(spec=["run_query"])
-
- # Attach the fake GAPIC to a real client.
- client = _make_client()
- client._firestore_api_internal = firestore_api
-
- # Make a **real** collection reference as parent.
- parent = client.collection("dee")
-
- # Add a dummy response to the minimal fake GAPIC.
- _, expected_prefix = parent._parent_info()
- name = "{}/sleep".format(expected_prefix)
- data = {"snooze": 10}
- response_pb = _make_query_response(name=name, data=data)
- firestore_api.run_query.return_value = iter([response_pb])
-
- # Execute the query and check the response.
- query = self._make_one(parent)
- get_response = query.stream()
- self.assertIsInstance(get_response, types.GeneratorType)
- returned = list(get_response)
- self.assertEqual(len(returned), 1)
- snapshot = returned[0]
- self.assertEqual(snapshot.reference._path, ("dee", "sleep"))
- self.assertEqual(snapshot.to_dict(), data)
-
- # Verify the mock call.
- parent_path, _ = parent._parent_info()
- firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_stream_with_transaction(self):
- # Create a minimal fake GAPIC.
- firestore_api = mock.Mock(spec=["run_query"])
-
- # Attach the fake GAPIC to a real client.
- client = _make_client()
- client._firestore_api_internal = firestore_api
-
- # Create a real-ish transaction for this client.
- transaction = client.transaction()
- txn_id = b"\x00\x00\x01-work-\xf2"
- transaction._id = txn_id
-
- # Make a **real** collection reference as parent.
- parent = client.collection("declaration")
-
- # Add a dummy response to the minimal fake GAPIC.
- parent_path, expected_prefix = parent._parent_info()
- name = "{}/burger".format(expected_prefix)
- data = {"lettuce": b"\xee\x87"}
- response_pb = _make_query_response(name=name, data=data)
- firestore_api.run_query.return_value = iter([response_pb])
-
- # Execute the query and check the response.
- query = self._make_one(parent)
- get_response = query.stream(transaction=transaction)
- self.assertIsInstance(get_response, types.GeneratorType)
- returned = list(get_response)
- self.assertEqual(len(returned), 1)
- snapshot = returned[0]
- self.assertEqual(snapshot.reference._path, ("declaration", "burger"))
- self.assertEqual(snapshot.to_dict(), data)
-
- # Verify the mock call.
- firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=txn_id,
- metadata=client._rpc_metadata,
- )
-
- def test_stream_no_results(self):
- # Create a minimal fake GAPIC with a dummy response.
- firestore_api = mock.Mock(spec=["run_query"])
- empty_response = _make_query_response()
- run_query_response = iter([empty_response])
- firestore_api.run_query.return_value = run_query_response
-
- # Attach the fake GAPIC to a real client.
- client = _make_client()
- client._firestore_api_internal = firestore_api
-
- # Make a **real** collection reference as parent.
- parent = client.collection("dah", "dah", "dum")
- query = self._make_one(parent)
-
- get_response = query.stream()
- self.assertIsInstance(get_response, types.GeneratorType)
- self.assertEqual(list(get_response), [])
-
- # Verify the mock call.
- parent_path, _ = parent._parent_info()
- firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_stream_second_response_in_empty_stream(self):
- # Create a minimal fake GAPIC with a dummy response.
- firestore_api = mock.Mock(spec=["run_query"])
- empty_response1 = _make_query_response()
- empty_response2 = _make_query_response()
- run_query_response = iter([empty_response1, empty_response2])
- firestore_api.run_query.return_value = run_query_response
-
- # Attach the fake GAPIC to a real client.
- client = _make_client()
- client._firestore_api_internal = firestore_api
-
- # Make a **real** collection reference as parent.
- parent = client.collection("dah", "dah", "dum")
- query = self._make_one(parent)
-
- get_response = query.stream()
- self.assertIsInstance(get_response, types.GeneratorType)
- self.assertEqual(list(get_response), [])
-
- # Verify the mock call.
- parent_path, _ = parent._parent_info()
- firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_stream_with_skipped_results(self):
- # Create a minimal fake GAPIC.
- firestore_api = mock.Mock(spec=["run_query"])
-
- # Attach the fake GAPIC to a real client.
- client = _make_client()
- client._firestore_api_internal = firestore_api
-
- # Make a **real** collection reference as parent.
- parent = client.collection("talk", "and", "chew-gum")
-
- # Add two dummy responses to the minimal fake GAPIC.
- _, expected_prefix = parent._parent_info()
- response_pb1 = _make_query_response(skipped_results=1)
- name = "{}/clock".format(expected_prefix)
- data = {"noon": 12, "nested": {"bird": 10.5}}
- response_pb2 = _make_query_response(name=name, data=data)
- firestore_api.run_query.return_value = iter([response_pb1, response_pb2])
-
- # Execute the query and check the response.
- query = self._make_one(parent)
- get_response = query.stream()
- self.assertIsInstance(get_response, types.GeneratorType)
- returned = list(get_response)
- self.assertEqual(len(returned), 1)
- snapshot = returned[0]
- self.assertEqual(snapshot.reference._path, ("talk", "and", "chew-gum", "clock"))
- self.assertEqual(snapshot.to_dict(), data)
-
- # Verify the mock call.
- parent_path, _ = parent._parent_info()
- firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_stream_empty_after_first_response(self):
- # Create a minimal fake GAPIC.
- firestore_api = mock.Mock(spec=["run_query"])
-
- # Attach the fake GAPIC to a real client.
- client = _make_client()
- client._firestore_api_internal = firestore_api
-
- # Make a **real** collection reference as parent.
- parent = client.collection("charles")
-
- # Add two dummy responses to the minimal fake GAPIC.
- _, expected_prefix = parent._parent_info()
- name = "{}/bark".format(expected_prefix)
- data = {"lee": "hoop"}
- response_pb1 = _make_query_response(name=name, data=data)
- response_pb2 = _make_query_response()
- firestore_api.run_query.return_value = iter([response_pb1, response_pb2])
-
- # Execute the query and check the response.
- query = self._make_one(parent)
- get_response = query.stream()
- self.assertIsInstance(get_response, types.GeneratorType)
- returned = list(get_response)
- self.assertEqual(len(returned), 1)
- snapshot = returned[0]
- self.assertEqual(snapshot.reference._path, ("charles", "bark"))
- self.assertEqual(snapshot.to_dict(), data)
-
- # Verify the mock call.
- parent_path, _ = parent._parent_info()
- firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- @mock.patch("google.cloud.firestore_v1beta1.query.Watch", autospec=True)
- def test_on_snapshot(self, watch):
- query = self._make_one(mock.sentinel.parent)
- query.on_snapshot(None)
- watch.for_query.assert_called_once()
-
def test_comparator_no_ordering(self):
query = self._make_one(mock.sentinel.parent)
query._orders = []
@@ -1371,22 +1142,59 @@ def test_comparator_missing_order_by_field_in_data_raises(self):
class Test__enum_from_op_string(unittest.TestCase):
@staticmethod
def _call_fut(op_string):
- from google.cloud.firestore_v1beta1.query import _enum_from_op_string
+ from google.cloud.firestore_v1.base_query import _enum_from_op_string
return _enum_from_op_string(op_string)
- def test_success(self):
- from google.cloud.firestore_v1beta1.gapic import enums
+ @staticmethod
+ def _get_op_class():
+ from google.cloud.firestore_v1.types import StructuredQuery
+
+ return StructuredQuery.FieldFilter.Operator
- op_class = enums.StructuredQuery.FieldFilter.Operator
+ def test_lt(self):
+ op_class = self._get_op_class()
self.assertEqual(self._call_fut("<"), op_class.LESS_THAN)
+
+ def test_le(self):
+ op_class = self._get_op_class()
self.assertEqual(self._call_fut("<="), op_class.LESS_THAN_OR_EQUAL)
+
+ def test_eq(self):
+ op_class = self._get_op_class()
self.assertEqual(self._call_fut("=="), op_class.EQUAL)
+
+ def test_ge(self):
+ op_class = self._get_op_class()
self.assertEqual(self._call_fut(">="), op_class.GREATER_THAN_OR_EQUAL)
+
+ def test_gt(self):
+ op_class = self._get_op_class()
self.assertEqual(self._call_fut(">"), op_class.GREATER_THAN)
+
+ def test_array_contains(self):
+ op_class = self._get_op_class()
self.assertEqual(self._call_fut("array_contains"), op_class.ARRAY_CONTAINS)
- def test_failure(self):
+ def test_in(self):
+ op_class = self._get_op_class()
+ self.assertEqual(self._call_fut("in"), op_class.IN)
+
+ def test_array_contains_any(self):
+ op_class = self._get_op_class()
+ self.assertEqual(
+ self._call_fut("array_contains_any"), op_class.ARRAY_CONTAINS_ANY
+ )
+
+ def test_not_in(self):
+ op_class = self._get_op_class()
+ self.assertEqual(self._call_fut("not-in"), op_class.NOT_IN)
+
+ def test_not_eq(self):
+ op_class = self._get_op_class()
+ self.assertEqual(self._call_fut("!="), op_class.NOT_EQUAL)
+
+ def test_invalid(self):
with self.assertRaises(ValueError):
self._call_fut("?")
@@ -1394,7 +1202,7 @@ def test_failure(self):
class Test__isnan(unittest.TestCase):
@staticmethod
def _call_fut(value):
- from google.cloud.firestore_v1beta1.query import _isnan
+ from google.cloud.firestore_v1.base_query import _isnan
return _isnan(value)
@@ -1412,15 +1220,16 @@ def test_invalid(self):
class Test__enum_from_direction(unittest.TestCase):
@staticmethod
def _call_fut(direction):
- from google.cloud.firestore_v1beta1.query import _enum_from_direction
+ from google.cloud.firestore_v1.base_query import _enum_from_direction
return _enum_from_direction(direction)
def test_success(self):
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.query import Query
+ from google.cloud.firestore_v1.types import StructuredQuery
+
+ from google.cloud.firestore_v1.query import Query
- dir_class = enums.StructuredQuery.Direction
+ dir_class = StructuredQuery.Direction
self.assertEqual(self._call_fut(Query.ASCENDING), dir_class.ASCENDING)
self.assertEqual(self._call_fut(Query.DESCENDING), dir_class.DESCENDING)
@@ -1436,34 +1245,36 @@ def test_failure(self):
class Test__filter_pb(unittest.TestCase):
@staticmethod
def _call_fut(field_or_unary):
- from google.cloud.firestore_v1beta1.query import _filter_pb
+ from google.cloud.firestore_v1.base_query import _filter_pb
return _filter_pb(field_or_unary)
def test_unary(self):
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import StructuredQuery
- unary_pb = query_pb2.StructuredQuery.UnaryFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="a.b.c"),
- op=enums.StructuredQuery.UnaryFilter.Operator.IS_NULL,
+ from google.cloud.firestore_v1.types import query
+
+ unary_pb = query.StructuredQuery.UnaryFilter(
+ field=query.StructuredQuery.FieldReference(field_path="a.b.c"),
+ op=StructuredQuery.UnaryFilter.Operator.IS_NULL,
)
filter_pb = self._call_fut(unary_pb)
- expected_pb = query_pb2.StructuredQuery.Filter(unary_filter=unary_pb)
+ expected_pb = query.StructuredQuery.Filter(unary_filter=unary_pb)
self.assertEqual(filter_pb, expected_pb)
def test_field(self):
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import query_pb2
-
- field_filter_pb = query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="XYZ"),
- op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN,
- value=document_pb2.Value(double_value=90.75),
+ from google.cloud.firestore_v1.types import StructuredQuery
+
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import query
+
+ field_filter_pb = query.StructuredQuery.FieldFilter(
+ field=query.StructuredQuery.FieldReference(field_path="XYZ"),
+ op=StructuredQuery.FieldFilter.Operator.GREATER_THAN,
+ value=document.Value(double_value=90.75),
)
filter_pb = self._call_fut(field_filter_pb)
- expected_pb = query_pb2.StructuredQuery.Filter(field_filter=field_filter_pb)
+ expected_pb = query.StructuredQuery.Filter(field_filter=field_filter_pb)
self.assertEqual(filter_pb, expected_pb)
def test_bad_type(self):
@@ -1474,7 +1285,7 @@ def test_bad_type(self):
class Test__cursor_pb(unittest.TestCase):
@staticmethod
def _call_fut(cursor_pair):
- from google.cloud.firestore_v1beta1.query import _cursor_pb
+ from google.cloud.firestore_v1.base_query import _cursor_pb
return _cursor_pb(cursor_pair)
@@ -1482,15 +1293,15 @@ def test_no_pair(self):
self.assertIsNone(self._call_fut(None))
def test_success(self):
- from google.cloud.firestore_v1beta1.proto import query_pb2
- from google.cloud.firestore_v1beta1 import _helpers
+ from google.cloud.firestore_v1.types import query
+ from google.cloud.firestore_v1 import _helpers
data = [1.5, 10, True]
cursor_pair = data, True
cursor_pb = self._call_fut(cursor_pair)
- expected_pb = query_pb2.Cursor(
+ expected_pb = query.Cursor(
values=[_helpers.encode_value(value) for value in data], before=True
)
self.assertEqual(cursor_pb, expected_pb)
@@ -1499,7 +1310,7 @@ def test_success(self):
class Test__query_response_to_snapshot(unittest.TestCase):
@staticmethod
def _call_fut(response_pb, collection, expected_prefix):
- from google.cloud.firestore_v1beta1.query import _query_response_to_snapshot
+ from google.cloud.firestore_v1.base_query import _query_response_to_snapshot
return _query_response_to_snapshot(response_pb, collection, expected_prefix)
@@ -1515,7 +1326,7 @@ def test_after_offset(self):
self.assertIsNone(snapshot)
def test_response(self):
- from google.cloud.firestore_v1beta1.document import DocumentSnapshot
+ from google.cloud.firestore_v1.document import DocumentSnapshot
client = _make_client()
collection = client.collection("a", "b", "c")
@@ -1538,6 +1349,46 @@ def test_response(self):
self.assertEqual(snapshot.update_time, response_pb.document.update_time)
+class Test__collection_group_query_response_to_snapshot(unittest.TestCase):
+ @staticmethod
+ def _call_fut(response_pb, collection):
+ from google.cloud.firestore_v1.base_query import (
+ _collection_group_query_response_to_snapshot,
+ )
+
+ return _collection_group_query_response_to_snapshot(response_pb, collection)
+
+ def test_empty(self):
+ response_pb = _make_query_response()
+ snapshot = self._call_fut(response_pb, None)
+ self.assertIsNone(snapshot)
+
+ def test_after_offset(self):
+ skipped_results = 410
+ response_pb = _make_query_response(skipped_results=skipped_results)
+ snapshot = self._call_fut(response_pb, None)
+ self.assertIsNone(snapshot)
+
+ def test_response(self):
+ from google.cloud.firestore_v1.document import DocumentSnapshot
+
+ client = _make_client()
+ collection = client.collection("a", "b", "c")
+ other_collection = client.collection("a", "b", "d")
+ to_match = other_collection.document("gigantic")
+ data = {"a": 901, "b": True}
+ response_pb = _make_query_response(name=to_match._document_path, data=data)
+
+ snapshot = self._call_fut(response_pb, collection)
+ self.assertIsInstance(snapshot, DocumentSnapshot)
+ self.assertEqual(snapshot.reference._document_path, to_match._document_path)
+ self.assertEqual(snapshot.to_dict(), data)
+ self.assertTrue(snapshot.exists)
+ self.assertEqual(snapshot.read_time, response_pb._pb.read_time)
+ self.assertEqual(snapshot.create_time, response_pb._pb.document.create_time)
+ self.assertEqual(snapshot.update_time, response_pb._pb.document.update_time)
+
+
def _make_credentials():
import google.auth.credentials
@@ -1545,29 +1396,27 @@ def _make_credentials():
def _make_client(project="project-project"):
- from google.cloud.firestore_v1beta1.client import Client
+ from google.cloud.firestore_v1.client import Client
credentials = _make_credentials()
-
- with pytest.deprecated_call():
- return Client(project=project, credentials=credentials)
+ return Client(project=project, credentials=credentials)
def _make_order_pb(field_path, direction):
- from google.cloud.firestore_v1beta1.proto import query_pb2
+ from google.cloud.firestore_v1.types import query
- return query_pb2.StructuredQuery.Order(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
+ return query.StructuredQuery.Order(
+ field=query.StructuredQuery.FieldReference(field_path=field_path),
direction=direction,
)
def _make_query_response(**kwargs):
# kwargs supported are ``skipped_results``, ``name`` and ``data``
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import firestore_pb2
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import firestore
from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.firestore_v1beta1 import _helpers
+ from google.cloud.firestore_v1 import _helpers
now = datetime.datetime.utcnow()
read_time = _datetime_to_pb_timestamp(now)
@@ -1576,15 +1425,81 @@ def _make_query_response(**kwargs):
name = kwargs.pop("name", None)
data = kwargs.pop("data", None)
if name is not None and data is not None:
- document_pb = document_pb2.Document(
- name=name, fields=_helpers.encode_dict(data)
- )
+ document_pb = document.Document(name=name, fields=_helpers.encode_dict(data))
delta = datetime.timedelta(seconds=100)
update_time = _datetime_to_pb_timestamp(now - delta)
create_time = _datetime_to_pb_timestamp(now - 2 * delta)
- document_pb.update_time.CopyFrom(update_time)
- document_pb.create_time.CopyFrom(create_time)
+ document_pb._pb.update_time.CopyFrom(update_time)
+ document_pb._pb.create_time.CopyFrom(create_time)
kwargs["document"] = document_pb
- return firestore_pb2.RunQueryResponse(**kwargs)
+ return firestore.RunQueryResponse(**kwargs)
+
+
+def _make_cursor_pb(pair):
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.types import query
+
+ values, before = pair
+ value_pbs = [_helpers.encode_value(value) for value in values]
+ return query.Cursor(values=value_pbs, before=before)
+
+
+class TestQueryPartition(unittest.TestCase):
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.base_query import QueryPartition
+
+ return QueryPartition
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def test_constructor(self):
+ partition = self._make_one(mock.sentinel.query, "start", "end")
+ assert partition._query is mock.sentinel.query
+ assert partition.start_at == "start"
+ assert partition.end_at == "end"
+
+ def test_query_begin(self):
+ partition = self._make_one(DummyQuery("PARENT"), None, "end")
+ query = partition.query()
+ assert query._parent == "PARENT"
+ assert query.all_descendants == "YUP"
+ assert query.orders == "ORDER"
+ assert query.start_at is None
+ assert query.end_at == (["end"], True)
+
+ def test_query_middle(self):
+ partition = self._make_one(DummyQuery("PARENT"), "start", "end")
+ query = partition.query()
+ assert query._parent == "PARENT"
+ assert query.all_descendants == "YUP"
+ assert query.orders == "ORDER"
+ assert query.start_at == (["start"], True)
+ assert query.end_at == (["end"], True)
+
+ def test_query_end(self):
+ partition = self._make_one(DummyQuery("PARENT"), "start", None)
+ query = partition.query()
+ assert query._parent == "PARENT"
+ assert query.all_descendants == "YUP"
+ assert query.orders == "ORDER"
+ assert query.start_at == (["start"], True)
+ assert query.end_at is None
+
+
+class DummyQuery:
+ _all_descendants = "YUP"
+ _PARTITION_QUERY_ORDER = "ORDER"
+
+ def __init__(
+ self, parent, *, all_descendants=None, orders=None, start_at=None, end_at=None
+ ):
+ self._parent = parent
+ self.all_descendants = all_descendants
+ self.orders = orders
+ self.start_at = start_at
+ self.end_at = end_at
diff --git a/tests/unit/v1/test_base_transaction.py b/tests/unit/v1/test_base_transaction.py
new file mode 100644
index 0000000000..b0dc527de2
--- /dev/null
+++ b/tests/unit/v1/test_base_transaction.py
@@ -0,0 +1,119 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import mock
+
+
+class TestBaseTransaction(unittest.TestCase):
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.base_transaction import BaseTransaction
+
+ return BaseTransaction
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def test_constructor_defaults(self):
+ from google.cloud.firestore_v1.transaction import MAX_ATTEMPTS
+
+ transaction = self._make_one()
+ self.assertEqual(transaction._max_attempts, MAX_ATTEMPTS)
+ self.assertFalse(transaction._read_only)
+ self.assertIsNone(transaction._id)
+
+ def test_constructor_explicit(self):
+ transaction = self._make_one(max_attempts=10, read_only=True)
+ self.assertEqual(transaction._max_attempts, 10)
+ self.assertTrue(transaction._read_only)
+ self.assertIsNone(transaction._id)
+
+ def test__options_protobuf_read_only(self):
+ from google.cloud.firestore_v1.types import common
+
+ transaction = self._make_one(read_only=True)
+ options_pb = transaction._options_protobuf(None)
+ expected_pb = common.TransactionOptions(
+ read_only=common.TransactionOptions.ReadOnly()
+ )
+ self.assertEqual(options_pb, expected_pb)
+
+ def test__options_protobuf_read_only_retry(self):
+ from google.cloud.firestore_v1.base_transaction import _CANT_RETRY_READ_ONLY
+
+ transaction = self._make_one(read_only=True)
+ retry_id = b"illuminate"
+
+ with self.assertRaises(ValueError) as exc_info:
+ transaction._options_protobuf(retry_id)
+
+ self.assertEqual(exc_info.exception.args, (_CANT_RETRY_READ_ONLY,))
+
+ def test__options_protobuf_read_write(self):
+ transaction = self._make_one()
+ options_pb = transaction._options_protobuf(None)
+ self.assertIsNone(options_pb)
+
+ def test__options_protobuf_on_retry(self):
+ from google.cloud.firestore_v1.types import common
+
+ transaction = self._make_one()
+ retry_id = b"hocus-pocus"
+ options_pb = transaction._options_protobuf(retry_id)
+ expected_pb = common.TransactionOptions(
+ read_write=common.TransactionOptions.ReadWrite(retry_transaction=retry_id)
+ )
+ self.assertEqual(options_pb, expected_pb)
+
+ def test_in_progress_property(self):
+ transaction = self._make_one()
+ self.assertFalse(transaction.in_progress)
+ transaction._id = b"not-none-bites"
+ self.assertTrue(transaction.in_progress)
+
+ def test_id_property(self):
+ transaction = self._make_one()
+ transaction._id = mock.sentinel.eye_dee
+ self.assertIs(transaction.id, mock.sentinel.eye_dee)
+
+
+class Test_Transactional(unittest.TestCase):
+ @staticmethod
+ def _get_target_class():
+ from google.cloud.firestore_v1.base_transaction import _BaseTransactional
+
+ return _BaseTransactional
+
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+
+ def test_constructor(self):
+ wrapped = self._make_one(mock.sentinel.callable_)
+ self.assertIs(wrapped.to_wrap, mock.sentinel.callable_)
+ self.assertIsNone(wrapped.current_id)
+ self.assertIsNone(wrapped.retry_id)
+
+ def test__reset(self):
+ wrapped = self._make_one(mock.sentinel.callable_)
+ wrapped.current_id = b"not-none"
+ wrapped.retry_id = b"also-not"
+
+ ret_val = wrapped._reset()
+ self.assertIsNone(ret_val)
+
+ self.assertIsNone(wrapped.current_id)
+ self.assertIsNone(wrapped.retry_id)
diff --git a/tests/unit/v1/test_batch.py b/tests/unit/v1/test_batch.py
index 08421d6039..119942fc34 100644
--- a/tests/unit/v1/test_batch.py
+++ b/tests/unit/v1/test_batch.py
@@ -35,136 +35,21 @@ def test_constructor(self):
self.assertIsNone(batch.write_results)
self.assertIsNone(batch.commit_time)
- def test__add_write_pbs(self):
- batch = self._make_one(mock.sentinel.client)
- self.assertEqual(batch._write_pbs, [])
- batch._add_write_pbs([mock.sentinel.write1, mock.sentinel.write2])
- self.assertEqual(batch._write_pbs, [mock.sentinel.write1, mock.sentinel.write2])
-
- def test_create(self):
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
-
- client = _make_client()
- batch = self._make_one(client)
- self.assertEqual(batch._write_pbs, [])
-
- reference = client.document("this", "one")
- document_data = {"a": 10, "b": 2.5}
- ret_val = batch.create(reference, document_data)
- self.assertIsNone(ret_val)
- new_write_pb = write_pb2.Write(
- update=document_pb2.Document(
- name=reference._document_path,
- fields={
- "a": _value_pb(integer_value=document_data["a"]),
- "b": _value_pb(double_value=document_data["b"]),
- },
- ),
- current_document=common_pb2.Precondition(exists=False),
- )
- self.assertEqual(batch._write_pbs, [new_write_pb])
-
- def test_set(self):
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
-
- client = _make_client()
- batch = self._make_one(client)
- self.assertEqual(batch._write_pbs, [])
-
- reference = client.document("another", "one")
- field = "zapzap"
- value = u"meadows and flowers"
- document_data = {field: value}
- ret_val = batch.set(reference, document_data)
- self.assertIsNone(ret_val)
- new_write_pb = write_pb2.Write(
- update=document_pb2.Document(
- name=reference._document_path,
- fields={field: _value_pb(string_value=value)},
- )
- )
- self.assertEqual(batch._write_pbs, [new_write_pb])
-
- def test_set_merge(self):
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
-
- client = _make_client()
- batch = self._make_one(client)
- self.assertEqual(batch._write_pbs, [])
-
- reference = client.document("another", "one")
- field = "zapzap"
- value = u"meadows and flowers"
- document_data = {field: value}
- ret_val = batch.set(reference, document_data, merge=True)
- self.assertIsNone(ret_val)
- new_write_pb = write_pb2.Write(
- update=document_pb2.Document(
- name=reference._document_path,
- fields={field: _value_pb(string_value=value)},
- ),
- update_mask={"field_paths": [field]},
- )
- self.assertEqual(batch._write_pbs, [new_write_pb])
-
- def test_update(self):
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
-
- client = _make_client()
- batch = self._make_one(client)
- self.assertEqual(batch._write_pbs, [])
-
- reference = client.document("cats", "cradle")
- field_path = "head.foot"
- value = u"knees toes shoulders"
- field_updates = {field_path: value}
-
- ret_val = batch.update(reference, field_updates)
- self.assertIsNone(ret_val)
-
- map_pb = document_pb2.MapValue(fields={"foot": _value_pb(string_value=value)})
- new_write_pb = write_pb2.Write(
- update=document_pb2.Document(
- name=reference._document_path,
- fields={"head": _value_pb(map_value=map_pb)},
- ),
- update_mask=common_pb2.DocumentMask(field_paths=[field_path]),
- current_document=common_pb2.Precondition(exists=True),
- )
- self.assertEqual(batch._write_pbs, [new_write_pb])
-
- def test_delete(self):
- from google.cloud.firestore_v1.proto import write_pb2
-
- client = _make_client()
- batch = self._make_one(client)
- self.assertEqual(batch._write_pbs, [])
-
- reference = client.document("early", "mornin", "dawn", "now")
- ret_val = batch.delete(reference)
- self.assertIsNone(ret_val)
- new_write_pb = write_pb2.Write(delete=reference._document_path)
- self.assertEqual(batch._write_pbs, [new_write_pb])
-
- def test_commit(self):
+ def _commit_helper(self, retry=None, timeout=None):
from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1.proto import firestore_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.types import firestore
+ from google.cloud.firestore_v1.types import write
# Create a minimal fake GAPIC with a dummy result.
firestore_api = mock.Mock(spec=["commit"])
timestamp = timestamp_pb2.Timestamp(seconds=1234567, nanos=123456798)
- commit_response = firestore_pb2.CommitResponse(
- write_results=[write_pb2.WriteResult(), write_pb2.WriteResult()],
+ commit_response = firestore.CommitResponse(
+ write_results=[write.WriteResult(), write.WriteResult()],
commit_time=timestamp,
)
firestore_api.commit.return_value = commit_response
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
# Attach the fake GAPIC to a real client.
client = _make_client("grand")
@@ -173,35 +58,49 @@ def test_commit(self):
# Actually make a batch with some mutations and call commit().
batch = self._make_one(client)
document1 = client.document("a", "b")
- batch.create(document1, {"ten": 10, "buck": u"ets"})
+ batch.create(document1, {"ten": 10, "buck": "ets"})
document2 = client.document("c", "d", "e", "f")
batch.delete(document2)
write_pbs = batch._write_pbs[::]
- write_results = batch.commit()
+ write_results = batch.commit(**kwargs)
self.assertEqual(write_results, list(commit_response.write_results))
self.assertEqual(batch.write_results, write_results)
- self.assertEqual(batch.commit_time, timestamp)
+ self.assertEqual(batch.commit_time.timestamp_pb(), timestamp)
# Make sure batch has no more "changes".
self.assertEqual(batch._write_pbs, [])
# Verify the mocks.
firestore_api.commit.assert_called_once_with(
- client._database_string,
- write_pbs,
- transaction=None,
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
+ **kwargs,
)
+ def test_commit(self):
+ self._commit_helper()
+
+ def test_commit_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+
+ self._commit_helper(retry=retry, timeout=timeout)
+
def test_as_context_mgr_wo_error(self):
from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1.proto import firestore_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import firestore
+ from google.cloud.firestore_v1.types import write
firestore_api = mock.Mock(spec=["commit"])
timestamp = timestamp_pb2.Timestamp(seconds=1234567, nanos=123456798)
- commit_response = firestore_pb2.CommitResponse(
- write_results=[write_pb2.WriteResult(), write_pb2.WriteResult()],
+ commit_response = firestore.CommitResponse(
+ write_results=[write.WriteResult(), write.WriteResult()],
commit_time=timestamp,
)
firestore_api.commit.return_value = commit_response
@@ -213,20 +112,22 @@ def test_as_context_mgr_wo_error(self):
with batch as ctx_mgr:
self.assertIs(ctx_mgr, batch)
- ctx_mgr.create(document1, {"ten": 10, "buck": u"ets"})
+ ctx_mgr.create(document1, {"ten": 10, "buck": "ets"})
ctx_mgr.delete(document2)
write_pbs = batch._write_pbs[::]
self.assertEqual(batch.write_results, list(commit_response.write_results))
- self.assertEqual(batch.commit_time, timestamp)
+ self.assertEqual(batch.commit_time.timestamp_pb(), timestamp)
# Make sure batch has no more "changes".
self.assertEqual(batch._write_pbs, [])
# Verify the mocks.
firestore_api.commit.assert_called_once_with(
- client._database_string,
- write_pbs,
- transaction=None,
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
)
@@ -240,24 +141,19 @@ def test_as_context_mgr_w_error(self):
with self.assertRaises(RuntimeError):
with batch as ctx_mgr:
- ctx_mgr.create(document1, {"ten": 10, "buck": u"ets"})
+ ctx_mgr.create(document1, {"ten": 10, "buck": "ets"})
ctx_mgr.delete(document2)
raise RuntimeError("testing")
+        # batch still has its changes, as __exit__ (and commit) is not invoked
+ # changes are preserved so commit can be retried
self.assertIsNone(batch.write_results)
self.assertIsNone(batch.commit_time)
- # batch still has its changes
self.assertEqual(len(batch._write_pbs), 2)
firestore_api.commit.assert_not_called()
-def _value_pb(**kwargs):
- from google.cloud.firestore_v1.proto.document_pb2 import Value
-
- return Value(**kwargs)
-
-
def _make_credentials():
import google.auth.credentials
diff --git a/tests/unit/v1/test_client.py b/tests/unit/v1/test_client.py
index 7ec062422a..0055dab2ca 100644
--- a/tests/unit/v1/test_client.py
+++ b/tests/unit/v1/test_client.py
@@ -1,4 +1,4 @@
-# Copyright 2017 Google LLC All rights reserved.
+# Copyright 2020 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -50,7 +50,7 @@ def test_constructor(self):
self.assertIsNone(client._emulator_host)
def test_constructor_with_emulator_host(self):
- from google.cloud.firestore_v1.client import _FIRESTORE_EMULATOR_HOST
+ from google.cloud.firestore_v1.base_client import _FIRESTORE_EMULATOR_HOST
credentials = _make_credentials()
emulator_host = "localhost:8081"
@@ -61,10 +61,12 @@ def test_constructor_with_emulator_host(self):
getenv.assert_called_once_with(_FIRESTORE_EMULATOR_HOST)
def test_constructor_explicit(self):
+ from google.api_core.client_options import ClientOptions
+
credentials = _make_credentials()
database = "now-db"
client_info = mock.Mock()
- client_options = mock.Mock()
+ client_options = ClientOptions("endpoint")
client = self._make_one(
project=self.PROJECT,
credentials=credentials,
@@ -87,102 +89,6 @@ def test_constructor_w_client_options(self):
)
self.assertEqual(client._target, "foo-firestore.googleapis.com")
- @mock.patch(
- "google.cloud.firestore_v1.gapic.firestore_client.FirestoreClient",
- autospec=True,
- return_value=mock.sentinel.firestore_api,
- )
- def test__firestore_api_property(self, mock_client):
- mock_client.SERVICE_ADDRESS = "endpoint"
- client = self._make_default_one()
- client_info = client._client_info = mock.Mock()
- self.assertIsNone(client._firestore_api_internal)
- firestore_api = client._firestore_api
- self.assertIs(firestore_api, mock_client.return_value)
- self.assertIs(firestore_api, client._firestore_api_internal)
- mock_client.assert_called_once_with(
- transport=client._transport, client_info=client_info
- )
-
- # Call again to show that it is cached, but call count is still 1.
- self.assertIs(client._firestore_api, mock_client.return_value)
- self.assertEqual(mock_client.call_count, 1)
-
- @mock.patch(
- "google.cloud.firestore_v1.gapic.firestore_client.FirestoreClient",
- autospec=True,
- return_value=mock.sentinel.firestore_api,
- )
- @mock.patch(
- "google.cloud.firestore_v1.gapic.transports.firestore_grpc_transport.firestore_pb2_grpc.grpc.insecure_channel",
- autospec=True,
- )
- def test__firestore_api_property_with_emulator(
- self, mock_insecure_channel, mock_client
- ):
- emulator_host = "localhost:8081"
- with mock.patch("os.getenv") as getenv:
- getenv.return_value = emulator_host
- client = self._make_default_one()
-
- self.assertIsNone(client._firestore_api_internal)
- firestore_api = client._firestore_api
- self.assertIs(firestore_api, mock_client.return_value)
- self.assertIs(firestore_api, client._firestore_api_internal)
-
- mock_insecure_channel.assert_called_once_with(emulator_host)
-
- # Call again to show that it is cached, but call count is still 1.
- self.assertIs(client._firestore_api, mock_client.return_value)
- self.assertEqual(mock_client.call_count, 1)
-
- def test___database_string_property(self):
- credentials = _make_credentials()
- database = "cheeeeez"
- client = self._make_one(
- project=self.PROJECT, credentials=credentials, database=database
- )
- self.assertIsNone(client._database_string_internal)
- database_string = client._database_string
- expected = "projects/{}/databases/{}".format(client.project, client._database)
- self.assertEqual(database_string, expected)
- self.assertIs(database_string, client._database_string_internal)
-
- # Swap it out with a unique value to verify it is cached.
- client._database_string_internal = mock.sentinel.cached
- self.assertIs(client._database_string, mock.sentinel.cached)
-
- def test___rpc_metadata_property(self):
- credentials = _make_credentials()
- database = "quanta"
- client = self._make_one(
- project=self.PROJECT, credentials=credentials, database=database
- )
-
- self.assertEqual(
- client._rpc_metadata,
- [("google-cloud-resource-prefix", client._database_string)],
- )
-
- def test__rpc_metadata_property_with_emulator(self):
- emulator_host = "localhost:8081"
- with mock.patch("os.getenv") as getenv:
- getenv.return_value = emulator_host
-
- credentials = _make_credentials()
- database = "quanta"
- client = self._make_one(
- project=self.PROJECT, credentials=credentials, database=database
- )
-
- self.assertEqual(
- client._rpc_metadata,
- [
- ("google-cloud-resource-prefix", client._database_string),
- ("authorization", "Bearer owner"),
- ],
- )
-
def test_collection_factory(self):
from google.cloud.firestore_v1.collection import CollectionReference
@@ -212,15 +118,26 @@ def test_collection_factory_nested(self):
self.assertIs(collection2._client, client)
self.assertIsInstance(collection2, CollectionReference)
+ def test__get_collection_reference(self):
+ from google.cloud.firestore_v1.collection import CollectionReference
+
+ client = self._make_default_one()
+ collection = client._get_collection_reference("collectionId")
+
+ self.assertIs(collection._client, client)
+ self.assertIsInstance(collection, CollectionReference)
+
def test_collection_group(self):
client = self._make_default_one()
- query = client.collection_group("collectionId").where("foo", "==", u"bar")
+ query = client.collection_group("collectionId").where("foo", "==", "bar")
- assert query._all_descendants
- assert query._field_filters[0].field.field_path == "foo"
- assert query._field_filters[0].value.string_value == u"bar"
- assert query._field_filters[0].op == query._field_filters[0].EQUAL
- assert query._parent.id == "collectionId"
+ self.assertTrue(query._all_descendants)
+ self.assertEqual(query._field_filters[0].field.field_path, "foo")
+ self.assertEqual(query._field_filters[0].value.string_value, "bar")
+ self.assertEqual(
+ query._field_filters[0].op, query._field_filters[0].Operator.EQUAL
+ )
+ self.assertEqual(query._parent.id, "collectionId")
def test_collection_group_no_slashes(self):
client = self._make_default_one()
@@ -276,86 +193,24 @@ def test_document_factory_w_nested_path(self):
self.assertIs(document2._client, client)
self.assertIsInstance(document2, DocumentReference)
- def test_field_path(self):
- klass = self._get_target_class()
- self.assertEqual(klass.field_path("a", "b", "c"), "a.b.c")
-
- def test_write_option_last_update(self):
- from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1._helpers import LastUpdateOption
-
- timestamp = timestamp_pb2.Timestamp(seconds=1299767599, nanos=811111097)
-
- klass = self._get_target_class()
- option = klass.write_option(last_update_time=timestamp)
- self.assertIsInstance(option, LastUpdateOption)
- self.assertEqual(option._last_update_time, timestamp)
-
- def test_write_option_exists(self):
- from google.cloud.firestore_v1._helpers import ExistsOption
-
- klass = self._get_target_class()
-
- option1 = klass.write_option(exists=False)
- self.assertIsInstance(option1, ExistsOption)
- self.assertFalse(option1._exists)
-
- option2 = klass.write_option(exists=True)
- self.assertIsInstance(option2, ExistsOption)
- self.assertTrue(option2._exists)
-
- def test_write_open_neither_arg(self):
- from google.cloud.firestore_v1.client import _BAD_OPTION_ERR
-
- klass = self._get_target_class()
- with self.assertRaises(TypeError) as exc_info:
- klass.write_option()
-
- self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,))
-
- def test_write_multiple_args(self):
- from google.cloud.firestore_v1.client import _BAD_OPTION_ERR
-
- klass = self._get_target_class()
- with self.assertRaises(TypeError) as exc_info:
- klass.write_option(exists=False, last_update_time=mock.sentinel.timestamp)
-
- self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,))
-
- def test_write_bad_arg(self):
- from google.cloud.firestore_v1.client import _BAD_OPTION_ERR
-
- klass = self._get_target_class()
- with self.assertRaises(TypeError) as exc_info:
- klass.write_option(spinach="popeye")
-
- extra = "{!r} was provided".format("spinach")
- self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR, extra))
-
- def test_collections(self):
- from google.api_core.page_iterator import Iterator
- from google.api_core.page_iterator import Page
+ def _collections_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1.collection import CollectionReference
collection_ids = ["users", "projects"]
- client = self._make_default_one()
- firestore_api = mock.Mock(spec=["list_collection_ids"])
- client._firestore_api_internal = firestore_api
- class _Iterator(Iterator):
- def __init__(self, pages):
- super(_Iterator, self).__init__(client=None)
- self._pages = pages
+ class Pager(object):
+ def __iter__(self):
+ yield from collection_ids
- def _next_page(self):
- if self._pages:
- page, self._pages = self._pages[0], self._pages[1:]
- return Page(self, page, self.item_to_value)
+ firestore_api = mock.Mock(spec=["list_collection_ids"])
+ firestore_api.list_collection_ids.return_value = Pager()
- iterator = _Iterator(pages=[collection_ids])
- firestore_api.list_collection_ids.return_value = iterator
+ client = self._make_default_one()
+ client._firestore_api_internal = firestore_api
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
- collections = list(client.collections())
+ collections = list(client.collections(**kwargs))
self.assertEqual(len(collections), len(collection_ids))
for collection, collection_id in zip(collections, collection_ids):
@@ -365,10 +220,20 @@ def _next_page(self):
base_path = client._database_string + "/documents"
firestore_api.list_collection_ids.assert_called_once_with(
- base_path, metadata=client._rpc_metadata
+ request={"parent": base_path}, metadata=client._rpc_metadata, **kwargs,
)
- def _get_all_helper(self, client, references, document_pbs, **kwargs):
+ def test_collections(self):
+ self._collections_helper()
+
+ def test_collections_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._collections_helper(retry=retry, timeout=timeout)
+
+ def _invoke_get_all(self, client, references, document_pbs, **kwargs):
# Create a minimal fake GAPIC with a dummy response.
firestore_api = mock.Mock(spec=["batch_get_documents"])
response_iterator = iter(document_pbs)
@@ -383,154 +248,115 @@ def _get_all_helper(self, client, references, document_pbs, **kwargs):
return list(snapshots)
- def _info_for_get_all(self, data1, data2):
+ def _get_all_helper(self, num_snapshots=2, txn_id=None, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.types import common
+        from google.cloud.firestore_v1.document import DocumentSnapshot
+
client = self._make_default_one()
- document1 = client.document("pineapple", "lamp1")
- document2 = client.document("pineapple", "lamp2")
- # Make response protobufs.
+ data1 = {"a": "cheese"}
+ document1 = client.document("pineapple", "lamp1")
document_pb1, read_time = _doc_get_info(document1._document_path, data1)
response1 = _make_batch_response(found=document_pb1, read_time=read_time)
- document_pb2, read_time = _doc_get_info(document2._document_path, data2)
- response2 = _make_batch_response(found=document_pb2, read_time=read_time)
+ data2 = {"b": True, "c": 18}
+ document2 = client.document("pineapple", "lamp2")
+ document, read_time = _doc_get_info(document2._document_path, data2)
+ response2 = _make_batch_response(found=document, read_time=read_time)
- return client, document1, document2, response1, response2
+ document3 = client.document("pineapple", "lamp3")
+ response3 = _make_batch_response(missing=document3._document_path)
- def test_get_all(self):
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.document import DocumentSnapshot
+ expected_data = [data1, data2, None][:num_snapshots]
+ documents = [document1, document2, document3][:num_snapshots]
+ responses = [response1, response2, response3][:num_snapshots]
+ field_paths = [
+ field_path for field_path in ["a", "b", None][:num_snapshots] if field_path
+ ]
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
- data1 = {"a": u"cheese"}
- data2 = {"b": True, "c": 18}
- info = self._info_for_get_all(data1, data2)
- client, document1, document2, response1, response2 = info
+ if txn_id is not None:
+ transaction = client.transaction()
+ transaction._id = txn_id
+ kwargs["transaction"] = transaction
- # Exercise the mocked ``batch_get_documents``.
- field_paths = ["a", "b"]
- snapshots = self._get_all_helper(
- client,
- [document1, document2],
- [response1, response2],
- field_paths=field_paths,
+ snapshots = self._invoke_get_all(
+ client, documents, responses, field_paths=field_paths, **kwargs,
)
- self.assertEqual(len(snapshots), 2)
- snapshot1 = snapshots[0]
- self.assertIsInstance(snapshot1, DocumentSnapshot)
- self.assertIs(snapshot1._reference, document1)
- self.assertEqual(snapshot1._data, data1)
+ self.assertEqual(len(snapshots), num_snapshots)
- snapshot2 = snapshots[1]
- self.assertIsInstance(snapshot2, DocumentSnapshot)
- self.assertIs(snapshot2._reference, document2)
- self.assertEqual(snapshot2._data, data2)
+ for data, document, snapshot in zip(expected_data, documents, snapshots):
+ self.assertIsInstance(snapshot, DocumentSnapshot)
+ self.assertIs(snapshot._reference, document)
+ if data is None:
+ self.assertFalse(snapshot.exists)
+ else:
+ self.assertEqual(snapshot._data, data)
# Verify the call to the mock.
- doc_paths = [document1._document_path, document2._document_path]
- mask = common_pb2.DocumentMask(field_paths=field_paths)
+ doc_paths = [document._document_path for document in documents]
+ mask = common.DocumentMask(field_paths=field_paths)
+
+ kwargs.pop("transaction", None)
+
client._firestore_api.batch_get_documents.assert_called_once_with(
- client._database_string,
- doc_paths,
- mask,
- transaction=None,
+ request={
+ "database": client._database_string,
+ "documents": doc_paths,
+ "mask": mask,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
+ **kwargs,
)
- def test_get_all_with_transaction(self):
- from google.cloud.firestore_v1.document import DocumentSnapshot
+ def test_get_all(self):
+ self._get_all_helper()
- data = {"so-much": 484}
- info = self._info_for_get_all(data, {})
- client, document, _, response, _ = info
- transaction = client.transaction()
+ def test_get_all_with_transaction(self):
txn_id = b"the-man-is-non-stop"
- transaction._id = txn_id
+ self._get_all_helper(num_snapshots=1, txn_id=txn_id)
- # Exercise the mocked ``batch_get_documents``.
- snapshots = self._get_all_helper(
- client, [document], [response], transaction=transaction
- )
- self.assertEqual(len(snapshots), 1)
+ def test_get_all_w_retry_timeout(self):
+ from google.api_core.retry import Retry
- snapshot = snapshots[0]
- self.assertIsInstance(snapshot, DocumentSnapshot)
- self.assertIs(snapshot._reference, document)
- self.assertEqual(snapshot._data, data)
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._get_all_helper(retry=retry, timeout=timeout)
- # Verify the call to the mock.
- doc_paths = [document._document_path]
- client._firestore_api.batch_get_documents.assert_called_once_with(
- client._database_string,
- doc_paths,
- None,
- transaction=txn_id,
- metadata=client._rpc_metadata,
- )
+ def test_get_all_wrong_order(self):
+ self._get_all_helper(num_snapshots=3)
def test_get_all_unknown_result(self):
- from google.cloud.firestore_v1.client import _BAD_DOC_TEMPLATE
+ from google.cloud.firestore_v1.base_client import _BAD_DOC_TEMPLATE
- info = self._info_for_get_all({"z": 28.5}, {})
- client, document, _, _, response = info
+ client = self._make_default_one()
+
+ expected_document = client.document("pineapple", "lamp1")
+
+ data = {"z": 28.5}
+ wrong_document = client.document("pineapple", "lamp2")
+ document_pb, read_time = _doc_get_info(wrong_document._document_path, data)
+ response = _make_batch_response(found=document_pb, read_time=read_time)
# Exercise the mocked ``batch_get_documents``.
with self.assertRaises(ValueError) as exc_info:
- self._get_all_helper(client, [document], [response])
+ self._invoke_get_all(client, [expected_document], [response])
err_msg = _BAD_DOC_TEMPLATE.format(response.found.name)
self.assertEqual(exc_info.exception.args, (err_msg,))
# Verify the call to the mock.
- doc_paths = [document._document_path]
+ doc_paths = [expected_document._document_path]
client._firestore_api.batch_get_documents.assert_called_once_with(
- client._database_string,
- doc_paths,
- None,
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_get_all_wrong_order(self):
- from google.cloud.firestore_v1.document import DocumentSnapshot
-
- data1 = {"up": 10}
- data2 = {"down": -10}
- info = self._info_for_get_all(data1, data2)
- client, document1, document2, response1, response2 = info
- document3 = client.document("pineapple", "lamp3")
- response3 = _make_batch_response(missing=document3._document_path)
-
- # Exercise the mocked ``batch_get_documents``.
- snapshots = self._get_all_helper(
- client, [document1, document2, document3], [response2, response1, response3]
- )
-
- self.assertEqual(len(snapshots), 3)
-
- snapshot1 = snapshots[0]
- self.assertIsInstance(snapshot1, DocumentSnapshot)
- self.assertIs(snapshot1._reference, document2)
- self.assertEqual(snapshot1._data, data2)
-
- snapshot2 = snapshots[1]
- self.assertIsInstance(snapshot2, DocumentSnapshot)
- self.assertIs(snapshot2._reference, document1)
- self.assertEqual(snapshot2._data, data1)
-
- self.assertFalse(snapshots[2].exists)
-
- # Verify the call to the mock.
- doc_paths = [
- document1._document_path,
- document2._document_path,
- document3._document_path,
- ]
- client._firestore_api.batch_get_documents.assert_called_once_with(
- client._database_string,
- doc_paths,
- None,
- transaction=None,
+ request={
+ "database": client._database_string,
+ "documents": doc_paths,
+ "mask": None,
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
)
@@ -555,163 +381,6 @@ def test_transaction(self):
self.assertIsNone(transaction._id)
-class Test__reference_info(unittest.TestCase):
- @staticmethod
- def _call_fut(references):
- from google.cloud.firestore_v1.client import _reference_info
-
- return _reference_info(references)
-
- def test_it(self):
- from google.cloud.firestore_v1.client import Client
-
- credentials = _make_credentials()
- client = Client(project="hi-projject", credentials=credentials)
-
- reference1 = client.document("a", "b")
- reference2 = client.document("a", "b", "c", "d")
- reference3 = client.document("a", "b")
- reference4 = client.document("f", "g")
-
- doc_path1 = reference1._document_path
- doc_path2 = reference2._document_path
- doc_path3 = reference3._document_path
- doc_path4 = reference4._document_path
- self.assertEqual(doc_path1, doc_path3)
-
- document_paths, reference_map = self._call_fut(
- [reference1, reference2, reference3, reference4]
- )
- self.assertEqual(document_paths, [doc_path1, doc_path2, doc_path3, doc_path4])
- # reference3 over-rides reference1.
- expected_map = {
- doc_path2: reference2,
- doc_path3: reference3,
- doc_path4: reference4,
- }
- self.assertEqual(reference_map, expected_map)
-
-
-class Test__get_reference(unittest.TestCase):
- @staticmethod
- def _call_fut(document_path, reference_map):
- from google.cloud.firestore_v1.client import _get_reference
-
- return _get_reference(document_path, reference_map)
-
- def test_success(self):
- doc_path = "a/b/c"
- reference_map = {doc_path: mock.sentinel.reference}
- self.assertIs(self._call_fut(doc_path, reference_map), mock.sentinel.reference)
-
- def test_failure(self):
- from google.cloud.firestore_v1.client import _BAD_DOC_TEMPLATE
-
- doc_path = "1/888/call-now"
- with self.assertRaises(ValueError) as exc_info:
- self._call_fut(doc_path, {})
-
- err_msg = _BAD_DOC_TEMPLATE.format(doc_path)
- self.assertEqual(exc_info.exception.args, (err_msg,))
-
-
-class Test__parse_batch_get(unittest.TestCase):
- @staticmethod
- def _call_fut(get_doc_response, reference_map, client=mock.sentinel.client):
- from google.cloud.firestore_v1.client import _parse_batch_get
-
- return _parse_batch_get(get_doc_response, reference_map, client)
-
- @staticmethod
- def _dummy_ref_string():
- from google.cloud.firestore_v1.client import DEFAULT_DATABASE
-
- project = u"bazzzz"
- collection_id = u"fizz"
- document_id = u"buzz"
- return u"projects/{}/databases/{}/documents/{}/{}".format(
- project, DEFAULT_DATABASE, collection_id, document_id
- )
-
- def test_found(self):
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.firestore_v1.document import DocumentSnapshot
-
- now = datetime.datetime.utcnow()
- read_time = _datetime_to_pb_timestamp(now)
- delta = datetime.timedelta(seconds=100)
- update_time = _datetime_to_pb_timestamp(now - delta)
- create_time = _datetime_to_pb_timestamp(now - 2 * delta)
-
- ref_string = self._dummy_ref_string()
- document_pb = document_pb2.Document(
- name=ref_string,
- fields={
- "foo": document_pb2.Value(double_value=1.5),
- "bar": document_pb2.Value(string_value=u"skillz"),
- },
- create_time=create_time,
- update_time=update_time,
- )
- response_pb = _make_batch_response(found=document_pb, read_time=read_time)
-
- reference_map = {ref_string: mock.sentinel.reference}
- snapshot = self._call_fut(response_pb, reference_map)
- self.assertIsInstance(snapshot, DocumentSnapshot)
- self.assertIs(snapshot._reference, mock.sentinel.reference)
- self.assertEqual(snapshot._data, {"foo": 1.5, "bar": u"skillz"})
- self.assertTrue(snapshot._exists)
- self.assertEqual(snapshot.read_time, read_time)
- self.assertEqual(snapshot.create_time, create_time)
- self.assertEqual(snapshot.update_time, update_time)
-
- def test_missing(self):
- from google.cloud.firestore_v1.document import DocumentReference
-
- ref_string = self._dummy_ref_string()
- response_pb = _make_batch_response(missing=ref_string)
- document = DocumentReference("fizz", "bazz", client=mock.sentinel.client)
- reference_map = {ref_string: document}
- snapshot = self._call_fut(response_pb, reference_map)
- self.assertFalse(snapshot.exists)
- self.assertEqual(snapshot.id, "bazz")
- self.assertIsNone(snapshot._data)
-
- def test_unset_result_type(self):
- response_pb = _make_batch_response()
- with self.assertRaises(ValueError):
- self._call_fut(response_pb, {})
-
- def test_unknown_result_type(self):
- response_pb = mock.Mock(spec=["WhichOneof"])
- response_pb.WhichOneof.return_value = "zoob_value"
-
- with self.assertRaises(ValueError):
- self._call_fut(response_pb, {})
-
- response_pb.WhichOneof.assert_called_once_with("result")
-
-
-class Test__get_doc_mask(unittest.TestCase):
- @staticmethod
- def _call_fut(field_paths):
- from google.cloud.firestore_v1.client import _get_doc_mask
-
- return _get_doc_mask(field_paths)
-
- def test_none(self):
- self.assertIsNone(self._call_fut(None))
-
- def test_paths(self):
- from google.cloud.firestore_v1.proto import common_pb2
-
- field_paths = ["a.b", "c"]
- result = self._call_fut(field_paths)
- expected = common_pb2.DocumentMask(field_paths=field_paths)
- self.assertEqual(result, expected)
-
-
def _make_credentials():
import google.auth.credentials
@@ -719,13 +388,13 @@ def _make_credentials():
def _make_batch_response(**kwargs):
- from google.cloud.firestore_v1.proto import firestore_pb2
+ from google.cloud.firestore_v1.types import firestore
- return firestore_pb2.BatchGetDocumentsResponse(**kwargs)
+ return firestore.BatchGetDocumentsResponse(**kwargs)
def _doc_get_info(ref_string, values):
- from google.cloud.firestore_v1.proto import document_pb2
+ from google.cloud.firestore_v1.types import document
from google.cloud._helpers import _datetime_to_pb_timestamp
from google.cloud.firestore_v1 import _helpers
@@ -735,7 +404,7 @@ def _doc_get_info(ref_string, values):
update_time = _datetime_to_pb_timestamp(now - delta)
create_time = _datetime_to_pb_timestamp(now - 2 * delta)
- document_pb = document_pb2.Document(
+ document_pb = document.Document(
name=ref_string,
fields=_helpers.encode_dict(values),
create_time=create_time,
diff --git a/tests/unit/v1/test_collection.py b/tests/unit/v1/test_collection.py
index fde538b9db..b75dfdfa2b 100644
--- a/tests/unit/v1/test_collection.py
+++ b/tests/unit/v1/test_collection.py
@@ -16,7 +16,6 @@
import unittest
import mock
-import six
class TestCollectionReference(unittest.TestCase):
@@ -32,10 +31,18 @@ def _make_one(self, *args, **kwargs):
@staticmethod
def _get_public_methods(klass):
- return set(
- name
- for name, value in six.iteritems(klass.__dict__)
- if (not name.startswith("_") and isinstance(value, types.FunctionType))
+ return set().union(
+ *(
+ (
+ name
+ for name, value in class_.__dict__.items()
+ if (
+ not name.startswith("_")
+ and isinstance(value, types.FunctionType)
+ )
+ )
+ for class_ in (klass,) + klass.__bases__
+ )
)
def test_query_method_matching(self):
@@ -61,135 +68,8 @@ def test_constructor(self):
expected_path = (collection_id1, document_id, collection_id2)
self.assertEqual(collection._path, expected_path)
- def test_constructor_invalid_path(self):
- with self.assertRaises(ValueError):
- self._make_one()
- with self.assertRaises(ValueError):
- self._make_one(99, "doc", "bad-collection-id")
- with self.assertRaises(ValueError):
- self._make_one("bad-document-ID", None, "sub-collection")
- with self.assertRaises(ValueError):
- self._make_one("Just", "A-Document")
-
- def test_constructor_invalid_kwarg(self):
- with self.assertRaises(TypeError):
- self._make_one("Coh-lek-shun", donut=True)
-
- def test___eq___other_type(self):
- client = mock.sentinel.client
- collection = self._make_one("name", client=client)
- other = object()
- self.assertFalse(collection == other)
-
- def test___eq___different_path_same_client(self):
- client = mock.sentinel.client
- collection = self._make_one("name", client=client)
- other = self._make_one("other", client=client)
- self.assertFalse(collection == other)
-
- def test___eq___same_path_different_client(self):
- client = mock.sentinel.client
- other_client = mock.sentinel.other_client
- collection = self._make_one("name", client=client)
- other = self._make_one("name", client=other_client)
- self.assertFalse(collection == other)
-
- def test___eq___same_path_same_client(self):
- client = mock.sentinel.client
- collection = self._make_one("name", client=client)
- other = self._make_one("name", client=client)
- self.assertTrue(collection == other)
-
- def test_id_property(self):
- collection_id = "hi-bob"
- collection = self._make_one(collection_id)
- self.assertEqual(collection.id, collection_id)
-
- def test_parent_property(self):
- from google.cloud.firestore_v1.document import DocumentReference
-
- collection_id1 = "grocery-store"
- document_id = "market"
- collection_id2 = "darth"
- client = _make_client()
- collection = self._make_one(
- collection_id1, document_id, collection_id2, client=client
- )
-
- parent = collection.parent
- self.assertIsInstance(parent, DocumentReference)
- self.assertIs(parent._client, client)
- self.assertEqual(parent._path, (collection_id1, document_id))
-
- def test_parent_property_top_level(self):
- collection = self._make_one("tahp-leh-vull")
- self.assertIsNone(collection.parent)
-
- def test_document_factory_explicit_id(self):
- from google.cloud.firestore_v1.document import DocumentReference
-
- collection_id = "grocery-store"
- document_id = "market"
- client = _make_client()
- collection = self._make_one(collection_id, client=client)
-
- child = collection.document(document_id)
- self.assertIsInstance(child, DocumentReference)
- self.assertIs(child._client, client)
- self.assertEqual(child._path, (collection_id, document_id))
-
- @mock.patch(
- "google.cloud.firestore_v1.collection._auto_id",
- return_value="zorpzorpthreezorp012",
- )
- def test_document_factory_auto_id(self, mock_auto_id):
- from google.cloud.firestore_v1.document import DocumentReference
-
- collection_name = "space-town"
- client = _make_client()
- collection = self._make_one(collection_name, client=client)
-
- child = collection.document()
- self.assertIsInstance(child, DocumentReference)
- self.assertIs(child._client, client)
- self.assertEqual(child._path, (collection_name, mock_auto_id.return_value))
-
- mock_auto_id.assert_called_once_with()
-
- def test__parent_info_top_level(self):
- client = _make_client()
- collection_id = "soap"
- collection = self._make_one(collection_id, client=client)
-
- parent_path, expected_prefix = collection._parent_info()
-
- expected_path = "projects/{}/databases/{}/documents".format(
- client.project, client._database
- )
- self.assertEqual(parent_path, expected_path)
- prefix = "{}/{}".format(expected_path, collection_id)
- self.assertEqual(expected_prefix, prefix)
-
- def test__parent_info_nested(self):
- collection_id1 = "bar"
- document_id = "baz"
- collection_id2 = "chunk"
- client = _make_client()
- collection = self._make_one(
- collection_id1, document_id, collection_id2, client=client
- )
-
- parent_path, expected_prefix = collection._parent_info()
-
- expected_path = "projects/{}/databases/{}/documents/{}/{}".format(
- client.project, client._database, collection_id1, document_id
- )
- self.assertEqual(parent_path, expected_path)
- prefix = "{}/{}".format(expected_path, collection_id2)
- self.assertEqual(expected_prefix, prefix)
-
def test_add_auto_assigned(self):
- from google.cloud.firestore_v1.proto import document_pb2
+ from google.cloud.firestore_v1.types import document
from google.cloud.firestore_v1.document import DocumentReference
from google.cloud.firestore_v1 import SERVER_TIMESTAMP
from google.cloud.firestore_v1._helpers import pbs_for_create
@@ -199,13 +79,15 @@ def test_add_auto_assigned(self):
write_result = mock.Mock(
update_time=mock.sentinel.update_time, spec=["update_time"]
)
+
commit_response = mock.Mock(
write_results=[write_result],
spec=["write_results", "commit_time"],
commit_time=mock.sentinel.commit_time,
)
+
firestore_api.commit.return_value = commit_response
- create_doc_response = document_pb2.Document()
+ create_doc_response = document.Document()
firestore_api.create_document.return_value = create_doc_response
client = _make_client()
client._firestore_api_internal = firestore_api
@@ -217,7 +99,7 @@ def test_add_auto_assigned(self):
# sure transforms during adds work.
document_data = {"been": "here", "now": SERVER_TIMESTAMP}
- patch = mock.patch("google.cloud.firestore_v1.collection._auto_id")
+ patch = mock.patch("google.cloud.firestore_v1.base_collection._auto_id")
random_doc_id = "DEADBEEF"
with patch as patched:
patched.return_value = random_doc_id
@@ -232,9 +114,11 @@ def test_add_auto_assigned(self):
write_pbs = pbs_for_create(document_ref._document_path, document_data)
firestore_api.commit.assert_called_once_with(
- client._database_string,
- write_pbs,
- transaction=None,
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
)
# Since we generate the ID locally, we don't call 'create_document'.
@@ -242,20 +126,21 @@ def test_add_auto_assigned(self):
@staticmethod
def _write_pb_for_create(document_path, document_data):
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1 import _helpers
- return write_pb2.Write(
- update=document_pb2.Document(
+ return write.Write(
+ update=document.Document(
name=document_path, fields=_helpers.encode_dict(document_data)
),
- current_document=common_pb2.Precondition(exists=False),
+ current_document=common.Precondition(exists=False),
)
- def test_add_explicit_id(self):
+ def _add_helper(self, retry=None, timeout=None):
from google.cloud.firestore_v1.document import DocumentReference
+ from google.cloud.firestore_v1 import _helpers
# Create a minimal fake GAPIC with a dummy response.
firestore_api = mock.Mock(spec=["commit"])
@@ -277,7 +162,11 @@ def test_add_explicit_id(self):
collection = self._make_one("parent", client=client)
document_data = {"zorp": 208.75, "i-did-not": b"know that"}
doc_id = "child"
- update_time, document_ref = collection.add(document_data, document_id=doc_id)
+
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+ update_time, document_ref = collection.add(
+ document_data, document_id=doc_id, **kwargs
+ )
# Verify the response and the mocks.
self.assertIs(update_time, mock.sentinel.update_time)
@@ -287,151 +176,32 @@ def test_add_explicit_id(self):
write_pb = self._write_pb_for_create(document_ref._document_path, document_data)
firestore_api.commit.assert_called_once_with(
- client._database_string,
- [write_pb],
- transaction=None,
+ request={
+ "database": client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
+ **kwargs,
)
- def test_select(self):
- from google.cloud.firestore_v1.query import Query
+ def test_add_explicit_id(self):
+ self._add_helper()
- collection = self._make_one("collection")
- field_paths = ["a", "b"]
- query = collection.select(field_paths)
+ def test_add_w_retry_timeout(self):
+ from google.api_core.retry import Retry
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- projection_paths = [
- field_ref.field_path for field_ref in query._projection.fields
- ]
- self.assertEqual(projection_paths, field_paths)
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._add_helper(retry=retry, timeout=timeout)
- @staticmethod
- def _make_field_filter_pb(field_path, op_string, value):
- from google.cloud.firestore_v1.proto import query_pb2
+ def _list_documents_helper(self, page_size=None, retry=None, timeout=None):
from google.cloud.firestore_v1 import _helpers
- from google.cloud.firestore_v1.query import _enum_from_op_string
-
- return query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- op=_enum_from_op_string(op_string),
- value=_helpers.encode_value(value),
- )
-
- def test_where(self):
- from google.cloud.firestore_v1.query import Query
-
- collection = self._make_one("collection")
- field_path = "foo"
- op_string = "=="
- value = 45
- query = collection.where(field_path, op_string, value)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(len(query._field_filters), 1)
- field_filter_pb = query._field_filters[0]
- self.assertEqual(
- field_filter_pb, self._make_field_filter_pb(field_path, op_string, value)
- )
-
- @staticmethod
- def _make_order_pb(field_path, direction):
- from google.cloud.firestore_v1.proto import query_pb2
- from google.cloud.firestore_v1.query import _enum_from_direction
-
- return query_pb2.StructuredQuery.Order(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- direction=_enum_from_direction(direction),
- )
-
- def test_order_by(self):
- from google.cloud.firestore_v1.query import Query
-
- collection = self._make_one("collection")
- field_path = "foo"
- direction = Query.DESCENDING
- query = collection.order_by(field_path, direction=direction)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(len(query._orders), 1)
- order_pb = query._orders[0]
- self.assertEqual(order_pb, self._make_order_pb(field_path, direction))
-
- def test_limit(self):
- from google.cloud.firestore_v1.query import Query
-
- collection = self._make_one("collection")
- limit = 15
- query = collection.limit(limit)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._limit, limit)
-
- def test_offset(self):
- from google.cloud.firestore_v1.query import Query
-
- collection = self._make_one("collection")
- offset = 113
- query = collection.offset(offset)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._offset, offset)
-
- def test_start_at(self):
- from google.cloud.firestore_v1.query import Query
-
- collection = self._make_one("collection")
- doc_fields = {"a": "b"}
- query = collection.start_at(doc_fields)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._start_at, (doc_fields, True))
-
- def test_start_after(self):
- from google.cloud.firestore_v1.query import Query
-
- collection = self._make_one("collection")
- doc_fields = {"d": "foo", "e": 10}
- query = collection.start_after(doc_fields)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._start_at, (doc_fields, False))
-
- def test_end_before(self):
- from google.cloud.firestore_v1.query import Query
-
- collection = self._make_one("collection")
- doc_fields = {"bar": 10.5}
- query = collection.end_before(doc_fields)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._end_at, (doc_fields, True))
-
- def test_end_at(self):
- from google.cloud.firestore_v1.query import Query
-
- collection = self._make_one("collection")
- doc_fields = {"opportunity": True, "reason": 9}
- query = collection.end_at(doc_fields)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._end_at, (doc_fields, False))
-
- def _list_documents_helper(self, page_size=None):
from google.api_core.page_iterator import Iterator
from google.api_core.page_iterator import Page
from google.cloud.firestore_v1.document import DocumentReference
- from google.cloud.firestore_v1.gapic.firestore_client import FirestoreClient
- from google.cloud.firestore_v1.proto.document_pb2 import Document
+ from google.cloud.firestore_v1.services.firestore.client import FirestoreClient
+ from google.cloud.firestore_v1.types.document import Document
class _Iterator(Iterator):
def __init__(self, pages):
@@ -454,11 +224,12 @@ def _next_page(self):
api_client.list_documents.return_value = iterator
client._firestore_api_internal = api_client
collection = self._make_one("collection", client=client)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
if page_size is not None:
- documents = list(collection.list_documents(page_size=page_size))
+ documents = list(collection.list_documents(page_size=page_size, **kwargs))
else:
- documents = list(collection.list_documents())
+ documents = list(collection.list_documents(**kwargs))
# Verify the response and the mocks.
self.assertEqual(len(documents), len(document_ids))
@@ -469,53 +240,69 @@ def _next_page(self):
parent, _ = collection._parent_info()
api_client.list_documents.assert_called_once_with(
- parent,
- collection.id,
- page_size=page_size,
- show_missing=True,
+ request={
+ "parent": parent,
+ "collection_id": collection.id,
+ "page_size": page_size,
+ "show_missing": True,
+ },
metadata=client._rpc_metadata,
+ **kwargs,
)
def test_list_documents_wo_page_size(self):
self._list_documents_helper()
+ def test_list_documents_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._list_documents_helper(retry=retry, timeout=timeout)
+
def test_list_documents_w_page_size(self):
self._list_documents_helper(page_size=25)
@mock.patch("google.cloud.firestore_v1.query.Query", autospec=True)
def test_get(self, query_class):
- import warnings
+ collection = self._make_one("collection")
+ get_response = collection.get()
+
+ query_class.assert_called_once_with(collection)
+ query_instance = query_class.return_value
+
+ self.assertIs(get_response, query_instance.get.return_value)
+ query_instance.get.assert_called_once_with(transaction=None)
+
+ @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True)
+ def test_get_w_retry_timeout(self, query_class):
+ from google.api_core.retry import Retry
+ retry = Retry(predicate=object())
+ timeout = 123.0
collection = self._make_one("collection")
- with warnings.catch_warnings(record=True) as warned:
- get_response = collection.get()
+ get_response = collection.get(retry=retry, timeout=timeout)
query_class.assert_called_once_with(collection)
query_instance = query_class.return_value
- self.assertIs(get_response, query_instance.stream.return_value)
- query_instance.stream.assert_called_once_with(transaction=None)
- # Verify the deprecation
- self.assertEqual(len(warned), 1)
- self.assertIs(warned[0].category, DeprecationWarning)
+ self.assertIs(get_response, query_instance.get.return_value)
+ query_instance.get.assert_called_once_with(
+ transaction=None, retry=retry, timeout=timeout,
+ )
@mock.patch("google.cloud.firestore_v1.query.Query", autospec=True)
def test_get_with_transaction(self, query_class):
- import warnings
collection = self._make_one("collection")
transaction = mock.sentinel.txn
- with warnings.catch_warnings(record=True) as warned:
- get_response = collection.get(transaction=transaction)
+ get_response = collection.get(transaction=transaction)
query_class.assert_called_once_with(collection)
query_instance = query_class.return_value
- self.assertIs(get_response, query_instance.stream.return_value)
- query_instance.stream.assert_called_once_with(transaction=transaction)
- # Verify the deprecation
- self.assertEqual(len(warned), 1)
- self.assertIs(warned[0].category, DeprecationWarning)
+ self.assertIs(get_response, query_instance.get.return_value)
+ query_instance.get.assert_called_once_with(transaction=transaction)
@mock.patch("google.cloud.firestore_v1.query.Query", autospec=True)
def test_stream(self, query_class):
@@ -527,6 +314,22 @@ def test_stream(self, query_class):
self.assertIs(stream_response, query_instance.stream.return_value)
query_instance.stream.assert_called_once_with(transaction=None)
+ @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True)
+ def test_stream_w_retry_timeout(self, query_class):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ collection = self._make_one("collection")
+ stream_response = collection.stream(retry=retry, timeout=timeout)
+
+ query_class.assert_called_once_with(collection)
+ query_instance = query_class.return_value
+ self.assertIs(stream_response, query_instance.stream.return_value)
+ query_instance.stream.assert_called_once_with(
+ transaction=None, retry=retry, timeout=timeout,
+ )
+
@mock.patch("google.cloud.firestore_v1.query.Query", autospec=True)
def test_stream_with_transaction(self, query_class):
collection = self._make_one("collection")
@@ -545,26 +348,6 @@ def test_on_snapshot(self, watch):
watch.for_query.assert_called_once()
-class Test__auto_id(unittest.TestCase):
- @staticmethod
- def _call_fut():
- from google.cloud.firestore_v1.collection import _auto_id
-
- return _auto_id()
-
- @mock.patch("random.choice")
- def test_it(self, mock_rand_choice):
- from google.cloud.firestore_v1.collection import _AUTO_ID_CHARS
-
- mock_result = "0123456789abcdefghij"
- mock_rand_choice.side_effect = list(mock_result)
- result = self._call_fut()
- self.assertEqual(result, mock_result)
-
- mock_calls = [mock.call(_AUTO_ID_CHARS)] * 20
- self.assertEqual(mock_rand_choice.mock_calls, mock_calls)
-
-
def _make_credentials():
import google.auth.credentials
diff --git a/tests/unit/v1/test_cross_language.py b/tests/unit/v1/test_cross_language.py
index 3e0983cd41..49bc11506e 100644
--- a/tests/unit/v1/test_cross_language.py
+++ b/tests/unit/v1/test_cross_language.py
@@ -20,19 +20,21 @@
import mock
import pytest
-from google.protobuf import json_format
-from google.cloud.firestore_v1.proto import document_pb2
-from google.cloud.firestore_v1.proto import firestore_pb2
-from google.cloud.firestore_v1.proto import tests_pb2
-from google.cloud.firestore_v1.proto import write_pb2
+from google.cloud.firestore_v1.types import document
+from google.cloud.firestore_v1.types import firestore
+from google.cloud.firestore_v1.types import write
+
+from tests.unit.v1 import conformance_tests
def _load_test_json(filename):
- with open(filename, "r") as tp_file:
- tp_json = json.load(tp_file)
- test_file = tests_pb2.TestFile()
- json_format.ParseDict(tp_json, test_file)
shortname = os.path.split(filename)[-1]
+
+ with open(filename, "r") as tp_file:
+ tp_json = tp_file.read()
+
+ test_file = conformance_tests.TestFile.from_json(tp_json)
+
for test_proto in test_file.tests:
test_proto.description = test_proto.description + " (%s)" % shortname
yield test_proto
@@ -46,59 +48,37 @@ def _load_test_json(filename):
ALL_TESTPROTOS.extend(_load_test_json(filename))
_CREATE_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "create"
+ test_proto for test_proto in ALL_TESTPROTOS if "create" in test_proto
]
-_GET_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "get"
-]
+_GET_TESTPROTOS = [test_proto for test_proto in ALL_TESTPROTOS if "get" in test_proto]
-_SET_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "set"
-]
+_SET_TESTPROTOS = [test_proto for test_proto in ALL_TESTPROTOS if "set_" in test_proto]
_UPDATE_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "update"
+ test_proto for test_proto in ALL_TESTPROTOS if "update" in test_proto
]
_UPDATE_PATHS_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "update_paths"
+ test_proto for test_proto in ALL_TESTPROTOS if "update_paths" in test_proto
]
_DELETE_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "delete"
+ test_proto for test_proto in ALL_TESTPROTOS if "delete" in test_proto
]
_LISTEN_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "listen"
+ test_proto for test_proto in ALL_TESTPROTOS if "listen" in test_proto
]
_QUERY_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "query"
+ test_proto for test_proto in ALL_TESTPROTOS if "query" in test_proto
]
def _mock_firestore_api():
firestore_api = mock.Mock(spec=["commit"])
- commit_response = firestore_pb2.CommitResponse(
- write_results=[write_pb2.WriteResult()]
- )
+ commit_response = firestore.CommitResponse(write_results=[write.WriteResult()])
firestore_api.commit.return_value = commit_response
return firestore_api
@@ -125,11 +105,19 @@ def _run_testcase(testcase, call, firestore_api, client):
call()
else:
call()
+
+ wrapped_writes = [
+ write.Write.wrap(write_pb) for write_pb in testcase.request.writes
+ ]
+
+ expected_request = {
+ "database": client._database_string,
+ "writes": wrapped_writes,
+ "transaction": None,
+ }
+
firestore_api.commit.assert_called_once_with(
- client._database_string,
- list(testcase.request.writes),
- transaction=None,
- metadata=client._rpc_metadata,
+ request=expected_request, metadata=client._rpc_metadata,
)
@@ -137,9 +125,9 @@ def _run_testcase(testcase, call, firestore_api, client):
def test_create_testprotos(test_proto):
testcase = test_proto.create
firestore_api = _mock_firestore_api()
- client, document = _make_client_document(firestore_api, testcase)
+ client, doc = _make_client_document(firestore_api, testcase)
data = convert_data(json.loads(testcase.json_data))
- call = functools.partial(document.create, data)
+ call = functools.partial(doc.create, data)
_run_testcase(testcase, call, firestore_api, client)
@@ -147,31 +135,34 @@ def test_create_testprotos(test_proto):
def test_get_testprotos(test_proto):
testcase = test_proto.get
firestore_api = mock.Mock(spec=["get_document"])
- response = document_pb2.Document()
+ response = document.Document()
firestore_api.get_document.return_value = response
- client, document = _make_client_document(firestore_api, testcase)
+ client, doc = _make_client_document(firestore_api, testcase)
+
+ doc.get() # No '.textprotos' for errors, field_paths.
- document.get() # No '.textprotos' for errors, field_paths.
+ expected_request = {
+ "name": doc._document_path,
+ "mask": None,
+ "transaction": None,
+ }
firestore_api.get_document.assert_called_once_with(
- document._document_path,
- mask=None,
- transaction=None,
- metadata=client._rpc_metadata,
+ request=expected_request, metadata=client._rpc_metadata,
)
@pytest.mark.parametrize("test_proto", _SET_TESTPROTOS)
def test_set_testprotos(test_proto):
- testcase = test_proto.set
+ testcase = test_proto.set_
firestore_api = _mock_firestore_api()
- client, document = _make_client_document(firestore_api, testcase)
+ client, doc = _make_client_document(firestore_api, testcase)
data = convert_data(json.loads(testcase.json_data))
- if testcase.HasField("option"):
+ if "option" in testcase:
merge = convert_set_option(testcase.option)
else:
merge = False
- call = functools.partial(document.set, data, merge=merge)
+ call = functools.partial(doc.set, data, merge=merge)
_run_testcase(testcase, call, firestore_api, client)
@@ -179,13 +170,13 @@ def test_set_testprotos(test_proto):
def test_update_testprotos(test_proto):
testcase = test_proto.update
firestore_api = _mock_firestore_api()
- client, document = _make_client_document(firestore_api, testcase)
+ client, doc = _make_client_document(firestore_api, testcase)
data = convert_data(json.loads(testcase.json_data))
- if testcase.HasField("precondition"):
+ if "precondition" in testcase:
option = convert_precondition(testcase.precondition)
else:
option = None
- call = functools.partial(document.update, data, option)
+ call = functools.partial(doc.update, data, option)
_run_testcase(testcase, call, firestore_api, client)
@@ -199,12 +190,12 @@ def test_update_paths_testprotos(test_proto): # pragma: NO COVER
def test_delete_testprotos(test_proto):
testcase = test_proto.delete
firestore_api = _mock_firestore_api()
- client, document = _make_client_document(firestore_api, testcase)
- if testcase.HasField("precondition"):
+ client, doc = _make_client_document(firestore_api, testcase)
+ if "precondition" in testcase:
option = convert_precondition(testcase.precondition)
else:
option = None
- call = functools.partial(document.delete, option)
+ call = functools.partial(doc.delete, option)
_run_testcase(testcase, call, firestore_api, client)
@@ -248,9 +239,12 @@ def callback(keys, applied_changes, read_time):
db_str = "projects/projectID/databases/(default)"
watch._firestore._database_string_internal = db_str
+ wrapped_responses = [
+ firestore.ListenResponse.wrap(proto) for proto in testcase.responses
+ ]
if testcase.is_error:
try:
- for proto in testcase.responses:
+ for proto in wrapped_responses:
watch.on_snapshot(proto)
except RuntimeError:
# listen-target-add-wrong-id.textpro
@@ -258,7 +252,7 @@ def callback(keys, applied_changes, read_time):
pass
else:
- for proto in testcase.responses:
+ for proto in wrapped_responses:
watch.on_snapshot(proto)
assert len(snapshots) == len(testcase.snapshots)
@@ -331,7 +325,7 @@ def convert_set_option(option):
_helpers.FieldPath(*field.field).to_api_repr() for field in option.fields
]
- assert option.all
+ assert option.all_
return True
@@ -405,17 +399,17 @@ def _client(self):
return self._parent._client
def _to_protobuf(self):
- from google.cloud.firestore_v1.proto import query_pb2
+ from google.cloud.firestore_v1.types import query
query_kwargs = {
"select": None,
- "from": None,
+ "from_": None,
"where": None,
"order_by": None,
"start_at": None,
"end_at": None,
}
- return query_pb2.StructuredQuery(**query_kwargs)
+ return query.StructuredQuery(**query_kwargs)
def parse_query(testcase):
@@ -457,40 +451,39 @@ def parse_query(testcase):
query = collection
for clause in testcase.clauses:
- kind = clause.WhichOneof("clause")
- if kind == "select":
+ if "select" in clause:
field_paths = [
".".join(field_path.field) for field_path in clause.select.fields
]
query = query.select(field_paths)
- elif kind == "where":
+ elif "where" in clause:
path = ".".join(clause.where.path.field)
value = convert_data(json.loads(clause.where.json_value))
query = query.where(path, clause.where.op, value)
- elif kind == "order_by":
+ elif "order_by" in clause:
path = ".".join(clause.order_by.path.field)
direction = clause.order_by.direction
direction = _directions.get(direction, direction)
query = query.order_by(path, direction=direction)
- elif kind == "offset":
+ elif "offset" in clause:
query = query.offset(clause.offset)
- elif kind == "limit":
+ elif "limit" in clause:
query = query.limit(clause.limit)
- elif kind == "start_at":
+ elif "start_at" in clause:
cursor = parse_cursor(clause.start_at, client)
query = query.start_at(cursor)
- elif kind == "start_after":
+ elif "start_after" in clause:
cursor = parse_cursor(clause.start_after, client)
query = query.start_after(cursor)
- elif kind == "end_at":
+ elif "end_at" in clause:
cursor = parse_cursor(clause.end_at, client)
query = query.end_at(cursor)
- elif kind == "end_before":
+ elif "end_before" in clause:
cursor = parse_cursor(clause.end_before, client)
query = query.end_before(cursor)
else: # pragma: NO COVER
- raise ValueError("Unknown query clause: {}".format(kind))
+ raise ValueError("Unknown query clause: {}".format(clause))
return query
@@ -504,7 +497,7 @@ def parse_cursor(cursor, client):
from google.cloud.firestore_v1 import DocumentReference
from google.cloud.firestore_v1 import DocumentSnapshot
- if cursor.HasField("doc_snapshot"):
+ if "doc_snapshot" in cursor:
path = parse_path(cursor.doc_snapshot.path)
doc_ref = DocumentReference(*path, client=client)
diff --git a/tests/unit/v1/test_document.py b/tests/unit/v1/test_document.py
index 89a19df674..6ca9b3096b 100644
--- a/tests/unit/v1/test_document.py
+++ b/tests/unit/v1/test_document.py
@@ -46,179 +46,35 @@ def test_constructor(self):
)
self.assertEqual(document.path, expected_path)
- def test_constructor_invalid_path(self):
- with self.assertRaises(ValueError):
- self._make_one()
- with self.assertRaises(ValueError):
- self._make_one(None, "before", "bad-collection-id", "fifteen")
- with self.assertRaises(ValueError):
- self._make_one("bad-document-ID", None)
- with self.assertRaises(ValueError):
- self._make_one("Just", "A-Collection", "Sub")
-
- def test_constructor_invalid_kwarg(self):
- with self.assertRaises(TypeError):
- self._make_one("Coh-lek-shun", "Dahk-yu-mehnt", burger=18.75)
-
- def test___copy__(self):
- client = _make_client("rain")
- document = self._make_one("a", "b", client=client)
- # Access the document path so it is copied.
- doc_path = document._document_path
- self.assertEqual(doc_path, document._document_path_internal)
-
- new_document = document.__copy__()
- self.assertIsNot(new_document, document)
- self.assertIs(new_document._client, document._client)
- self.assertEqual(new_document._path, document._path)
- self.assertEqual(
- new_document._document_path_internal, document._document_path_internal
- )
-
- def test___deepcopy__calls_copy(self):
- client = mock.sentinel.client
- document = self._make_one("a", "b", client=client)
- document.__copy__ = mock.Mock(return_value=mock.sentinel.new_doc, spec=[])
-
- unused_memo = {}
- new_document = document.__deepcopy__(unused_memo)
- self.assertIs(new_document, mock.sentinel.new_doc)
- document.__copy__.assert_called_once_with()
-
- def test__eq__same_type(self):
- document1 = self._make_one("X", "YY", client=mock.sentinel.client)
- document2 = self._make_one("X", "ZZ", client=mock.sentinel.client)
- document3 = self._make_one("X", "YY", client=mock.sentinel.client2)
- document4 = self._make_one("X", "YY", client=mock.sentinel.client)
-
- pairs = ((document1, document2), (document1, document3), (document2, document3))
- for candidate1, candidate2 in pairs:
- # We use == explicitly since assertNotEqual would use !=.
- equality_val = candidate1 == candidate2
- self.assertFalse(equality_val)
-
- # Check the only equal one.
- self.assertEqual(document1, document4)
- self.assertIsNot(document1, document4)
-
- def test__eq__other_type(self):
- document = self._make_one("X", "YY", client=mock.sentinel.client)
- other = object()
- equality_val = document == other
- self.assertFalse(equality_val)
- self.assertIs(document.__eq__(other), NotImplemented)
-
- def test___hash__(self):
- client = mock.MagicMock()
- client.__hash__.return_value = 234566789
- document = self._make_one("X", "YY", client=client)
- self.assertEqual(hash(document), hash(("X", "YY")) + hash(client))
-
- def test__ne__same_type(self):
- document1 = self._make_one("X", "YY", client=mock.sentinel.client)
- document2 = self._make_one("X", "ZZ", client=mock.sentinel.client)
- document3 = self._make_one("X", "YY", client=mock.sentinel.client2)
- document4 = self._make_one("X", "YY", client=mock.sentinel.client)
-
- self.assertNotEqual(document1, document2)
- self.assertNotEqual(document1, document3)
- self.assertNotEqual(document2, document3)
-
- # We use != explicitly since assertEqual would use ==.
- inequality_val = document1 != document4
- self.assertFalse(inequality_val)
- self.assertIsNot(document1, document4)
-
- def test__ne__other_type(self):
- document = self._make_one("X", "YY", client=mock.sentinel.client)
- other = object()
- self.assertNotEqual(document, other)
- self.assertIs(document.__ne__(other), NotImplemented)
-
- def test__document_path_property(self):
- project = "hi-its-me-ok-bye"
- client = _make_client(project=project)
-
- collection_id = "then"
- document_id = "090909iii"
- document = self._make_one(collection_id, document_id, client=client)
- doc_path = document._document_path
- expected = "projects/{}/databases/{}/documents/{}/{}".format(
- project, client._database, collection_id, document_id
- )
- self.assertEqual(doc_path, expected)
- self.assertIs(document._document_path_internal, doc_path)
-
- # Make sure value is cached.
- document._document_path_internal = mock.sentinel.cached
- self.assertIs(document._document_path, mock.sentinel.cached)
-
- def test__document_path_property_no_client(self):
- document = self._make_one("hi", "bye")
- self.assertIsNone(document._client)
- with self.assertRaises(ValueError):
- getattr(document, "_document_path")
-
- self.assertIsNone(document._document_path_internal)
-
- def test_id_property(self):
- document_id = "867-5309"
- document = self._make_one("Co-lek-shun", document_id)
- self.assertEqual(document.id, document_id)
-
- def test_parent_property(self):
- from google.cloud.firestore_v1.collection import CollectionReference
-
- collection_id = "grocery-store"
- document_id = "market"
- client = _make_client()
- document = self._make_one(collection_id, document_id, client=client)
-
- parent = document.parent
- self.assertIsInstance(parent, CollectionReference)
- self.assertIs(parent._client, client)
- self.assertEqual(parent._path, (collection_id,))
-
- def test_collection_factory(self):
- from google.cloud.firestore_v1.collection import CollectionReference
-
- collection_id = "grocery-store"
- document_id = "market"
- new_collection = "fruits"
- client = _make_client()
- document = self._make_one(collection_id, document_id, client=client)
+ @staticmethod
+ def _make_commit_repsonse(write_results=None):
+ from google.cloud.firestore_v1.types import firestore
- child = document.collection(new_collection)
- self.assertIsInstance(child, CollectionReference)
- self.assertIs(child._client, client)
- self.assertEqual(child._path, (collection_id, document_id, new_collection))
+ response = mock.create_autospec(firestore.CommitResponse)
+ response.write_results = write_results or [mock.sentinel.write_result]
+ response.commit_time = mock.sentinel.commit_time
+ return response
@staticmethod
def _write_pb_for_create(document_path, document_data):
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1 import _helpers
- return write_pb2.Write(
- update=document_pb2.Document(
+ return write.Write(
+ update=document.Document(
name=document_path, fields=_helpers.encode_dict(document_data)
),
- current_document=common_pb2.Precondition(exists=False),
+ current_document=common.Precondition(exists=False),
)
- @staticmethod
- def _make_commit_repsonse(write_results=None):
- from google.cloud.firestore_v1.proto import firestore_pb2
-
- response = mock.create_autospec(firestore_pb2.CommitResponse)
- response.write_results = write_results or [mock.sentinel.write_result]
- response.commit_time = mock.sentinel.commit_time
- return response
+ def _create_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
- def test_create(self):
# Create a minimal fake GAPIC with a dummy response.
- firestore_api = mock.Mock(spec=["commit"])
+ firestore_api = mock.Mock()
+ firestore_api.commit.mock_add_spec(spec=["commit"])
firestore_api.commit.return_value = self._make_commit_repsonse()
# Attach the fake GAPIC to a real client.
@@ -228,18 +84,33 @@ def test_create(self):
# Actually make a document and call create().
document = self._make_one("foo", "twelve", client=client)
document_data = {"hello": "goodbye", "count": 99}
- write_result = document.create(document_data)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ write_result = document.create(document_data, **kwargs)
# Verify the response and the mocks.
self.assertIs(write_result, mock.sentinel.write_result)
write_pb = self._write_pb_for_create(document._document_path, document_data)
firestore_api.commit.assert_called_once_with(
- client._database_string,
- [write_pb],
- transaction=None,
+ request={
+ "database": client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
+ **kwargs,
)
+ def test_create(self):
+ self._create_helper()
+
+ def test_create_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._create_helper(retry=retry, timeout=timeout)
+
def test_create_empty(self):
# Create a minimal fake GAPIC with a dummy response.
from google.cloud.firestore_v1.document import DocumentReference
@@ -268,13 +139,13 @@ def test_create_empty(self):
@staticmethod
def _write_pb_for_set(document_path, document_data, merge):
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1 import _helpers
- write_pbs = write_pb2.Write(
- update=document_pb2.Document(
+ write_pbs = write.Write(
+ update=document.Document(
name=document_path, fields=_helpers.encode_dict(document_data)
)
)
@@ -288,11 +159,13 @@ def _write_pb_for_set(document_path, document_data, merge):
field_paths = [
field_path.to_api_repr() for field_path in sorted(field_paths)
]
- mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
- write_pbs.update_mask.CopyFrom(mask)
+ mask = common.DocumentMask(field_paths=sorted(field_paths))
+ write_pbs._pb.update_mask.CopyFrom(mask._pb)
return write_pbs
- def _set_helper(self, merge=False, **option_kwargs):
+ def _set_helper(self, merge=False, retry=None, timeout=None, **option_kwargs):
+ from google.cloud.firestore_v1 import _helpers
+
# Create a minimal fake GAPIC with a dummy response.
firestore_api = mock.Mock(spec=["commit"])
firestore_api.commit.return_value = self._make_commit_repsonse()
@@ -304,41 +177,54 @@ def _set_helper(self, merge=False, **option_kwargs):
# Actually make a document and call create().
document = self._make_one("User", "Interface", client=client)
document_data = {"And": 500, "Now": b"\xba\xaa\xaa \xba\xaa\xaa"}
- write_result = document.set(document_data, merge)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ write_result = document.set(document_data, merge, **kwargs)
# Verify the response and the mocks.
self.assertIs(write_result, mock.sentinel.write_result)
write_pb = self._write_pb_for_set(document._document_path, document_data, merge)
firestore_api.commit.assert_called_once_with(
- client._database_string,
- [write_pb],
- transaction=None,
+ request={
+ "database": client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
+ **kwargs,
)
def test_set(self):
self._set_helper()
+ def test_set_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._set_helper(retry=retry, timeout=timeout)
+
def test_set_merge(self):
self._set_helper(merge=True)
@staticmethod
def _write_pb_for_update(document_path, update_values, field_paths):
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1 import _helpers
- return write_pb2.Write(
- update=document_pb2.Document(
+ return write.Write(
+ update=document.Document(
name=document_path, fields=_helpers.encode_dict(update_values)
),
- update_mask=common_pb2.DocumentMask(field_paths=field_paths),
- current_document=common_pb2.Precondition(exists=True),
+ update_mask=common.DocumentMask(field_paths=field_paths),
+ current_document=common.Precondition(exists=True),
)
- def _update_helper(self, **option_kwargs):
+ def _update_helper(self, retry=None, timeout=None, **option_kwargs):
+ from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1.transforms import DELETE_FIELD
# Create a minimal fake GAPIC with a dummy response.
@@ -355,12 +241,14 @@ def _update_helper(self, **option_kwargs):
field_updates = collections.OrderedDict(
(("hello", 1), ("then.do", False), ("goodbye", DELETE_FIELD))
)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
if option_kwargs:
option = client.write_option(**option_kwargs)
- write_result = document.update(field_updates, option=option)
+ write_result = document.update(field_updates, option=option, **kwargs)
else:
option = None
- write_result = document.update(field_updates)
+ write_result = document.update(field_updates, **kwargs)
# Verify the response and the mocks.
self.assertIs(write_result, mock.sentinel.write_result)
@@ -375,10 +263,13 @@ def _update_helper(self, **option_kwargs):
if option is not None:
option.modify_write(write_pb)
firestore_api.commit.assert_called_once_with(
- client._database_string,
- [write_pb],
- transaction=None,
+ request={
+ "database": client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
+ **kwargs,
)
def test_update_with_exists(self):
@@ -388,6 +279,13 @@ def test_update_with_exists(self):
def test_update(self):
self._update_helper()
+ def test_update_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._update_helper(retry=retry, timeout=timeout)
+
def test_update_with_precondition(self):
from google.protobuf import timestamp_pb2
@@ -410,8 +308,9 @@ def test_empty_update(self):
with self.assertRaises(ValueError):
document.update(field_updates)
- def _delete_helper(self, **option_kwargs):
- from google.cloud.firestore_v1.proto import write_pb2
+ def _delete_helper(self, retry=None, timeout=None, **option_kwargs):
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.types import write
# Create a minimal fake GAPIC with a dummy response.
firestore_api = mock.Mock(spec=["commit"])
@@ -420,26 +319,30 @@ def _delete_helper(self, **option_kwargs):
# Attach the fake GAPIC to a real client.
client = _make_client("donut-base")
client._firestore_api_internal = firestore_api
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
# Actually make a document and call delete().
document = self._make_one("where", "we-are", client=client)
if option_kwargs:
option = client.write_option(**option_kwargs)
- delete_time = document.delete(option=option)
+ delete_time = document.delete(option=option, **kwargs)
else:
option = None
- delete_time = document.delete()
+ delete_time = document.delete(**kwargs)
# Verify the response and the mocks.
self.assertIs(delete_time, mock.sentinel.commit_time)
- write_pb = write_pb2.Write(delete=document._document_path)
+ write_pb = write.Write(delete=document._document_path)
if option is not None:
option.modify_write(write_pb)
firestore_api.commit.assert_called_once_with(
- client._database_string,
- [write_pb],
- transaction=None,
+ request={
+ "database": client._database_string,
+ "writes": [write_pb],
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
+ **kwargs,
)
def test_delete(self):
@@ -451,17 +354,32 @@ def test_delete_with_option(self):
timestamp_pb = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244)
self._delete_helper(last_update_time=timestamp_pb)
- def _get_helper(self, field_paths=None, use_transaction=False, not_found=False):
+ def test_delete_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._delete_helper(retry=retry, timeout=timeout)
+
+ def _get_helper(
+ self,
+ field_paths=None,
+ use_transaction=False,
+ not_found=False,
+ retry=None,
+ timeout=None,
+ ):
from google.api_core.exceptions import NotFound
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import document_pb2
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import document
from google.cloud.firestore_v1.transaction import Transaction
# Create a minimal fake GAPIC with a dummy response.
create_time = 123
update_time = 234
firestore_api = mock.Mock(spec=["get_document"])
- response = mock.create_autospec(document_pb2.Document)
+ response = mock.create_autospec(document.Document)
response.fields = {}
response.create_time = create_time
response.update_time = update_time
@@ -482,7 +400,11 @@ def _get_helper(self, field_paths=None, use_transaction=False, not_found=False):
else:
transaction = None
- snapshot = document.get(field_paths=field_paths, transaction=transaction)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ snapshot = document.get(
+ field_paths=field_paths, transaction=transaction, **kwargs
+ )
self.assertIs(snapshot.reference, document)
if not_found:
@@ -500,7 +422,7 @@ def _get_helper(self, field_paths=None, use_transaction=False, not_found=False):
# Verify the request made to the API
if field_paths is not None:
- mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
+ mask = common.DocumentMask(field_paths=sorted(field_paths))
else:
mask = None
@@ -510,10 +432,13 @@ def _get_helper(self, field_paths=None, use_transaction=False, not_found=False):
expected_transaction_id = None
firestore_api.get_document.assert_called_once_with(
- document._document_path,
- mask=mask,
- transaction=expected_transaction_id,
+ request={
+ "name": document._document_path,
+ "mask": mask,
+ "transaction": expected_transaction_id,
+ },
metadata=client._rpc_metadata,
+ **kwargs,
)
def test_get_not_found(self):
@@ -522,6 +447,13 @@ def test_get_not_found(self):
def test_get_default(self):
self._get_helper()
+ def test_get_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._get_helper(retry=retry, timeout=timeout)
+
def test_get_w_string_field_path(self):
with self.assertRaises(ValueError):
self._get_helper(field_paths="foo")
@@ -535,36 +467,30 @@ def test_get_with_multiple_field_paths(self):
def test_get_with_transaction(self):
self._get_helper(use_transaction=True)
- def _collections_helper(self, page_size=None):
- from google.api_core.page_iterator import Iterator
- from google.api_core.page_iterator import Page
+ def _collections_helper(self, page_size=None, retry=None, timeout=None):
from google.cloud.firestore_v1.collection import CollectionReference
- from google.cloud.firestore_v1.gapic.firestore_client import FirestoreClient
+ from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1.services.firestore.client import FirestoreClient
- class _Iterator(Iterator):
- def __init__(self, pages):
- super(_Iterator, self).__init__(client=None)
- self._pages = pages
+ collection_ids = ["coll-1", "coll-2"]
- def _next_page(self):
- if self._pages:
- page, self._pages = self._pages[0], self._pages[1:]
- return Page(self, page, self.item_to_value)
+ class Pager(object):
+ def __iter__(self):
+ yield from collection_ids
- collection_ids = ["coll-1", "coll-2"]
- iterator = _Iterator(pages=[collection_ids])
api_client = mock.create_autospec(FirestoreClient)
- api_client.list_collection_ids.return_value = iterator
+ api_client.list_collection_ids.return_value = Pager()
client = _make_client()
client._firestore_api_internal = api_client
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
# Actually make a document and call delete().
document = self._make_one("where", "we-are", client=client)
if page_size is not None:
- collections = list(document.collections(page_size=page_size))
+ collections = list(document.collections(page_size=page_size, **kwargs))
else:
- collections = list(document.collections())
+ collections = list(document.collections(**kwargs))
# Verify the response and the mocks.
self.assertEqual(len(collections), len(collection_ids))
@@ -574,7 +500,9 @@ def _next_page(self):
self.assertEqual(collection.id, collection_id)
api_client.list_collection_ids.assert_called_once_with(
- document._document_path, page_size=page_size, metadata=client._rpc_metadata
+ request={"parent": document._document_path, "page_size": page_size},
+ metadata=client._rpc_metadata,
+ **kwargs,
)
def test_collections_wo_page_size(self):
@@ -583,6 +511,13 @@ def test_collections_wo_page_size(self):
def test_collections_w_page_size(self):
self._collections_helper(page_size=10)
+ def test_collections_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._collections_helper(retry=retry, timeout=timeout)
+
@mock.patch("google.cloud.firestore_v1.document.Watch", autospec=True)
def test_on_snapshot(self, watch):
client = mock.Mock(_database_string="sprinklez", spec=["_database_string"])
@@ -591,227 +526,6 @@ def test_on_snapshot(self, watch):
watch.for_document.assert_called_once()
-class TestDocumentSnapshot(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1.document import DocumentSnapshot
-
- return DocumentSnapshot
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- def _make_reference(self, *args, **kwargs):
- from google.cloud.firestore_v1.document import DocumentReference
-
- return DocumentReference(*args, **kwargs)
-
- def _make_w_ref(self, ref_path=("a", "b"), data={}, exists=True):
- client = mock.sentinel.client
- reference = self._make_reference(*ref_path, client=client)
- return self._make_one(
- reference,
- data,
- exists,
- mock.sentinel.read_time,
- mock.sentinel.create_time,
- mock.sentinel.update_time,
- )
-
- def test_constructor(self):
- client = mock.sentinel.client
- reference = self._make_reference("hi", "bye", client=client)
- data = {"zoop": 83}
- snapshot = self._make_one(
- reference,
- data,
- True,
- mock.sentinel.read_time,
- mock.sentinel.create_time,
- mock.sentinel.update_time,
- )
- self.assertIs(snapshot._reference, reference)
- self.assertEqual(snapshot._data, data)
- self.assertIsNot(snapshot._data, data) # Make sure copied.
- self.assertTrue(snapshot._exists)
- self.assertIs(snapshot.read_time, mock.sentinel.read_time)
- self.assertIs(snapshot.create_time, mock.sentinel.create_time)
- self.assertIs(snapshot.update_time, mock.sentinel.update_time)
-
- def test___eq___other_type(self):
- snapshot = self._make_w_ref()
- other = object()
- self.assertFalse(snapshot == other)
-
- def test___eq___different_reference_same_data(self):
- snapshot = self._make_w_ref(("a", "b"))
- other = self._make_w_ref(("c", "d"))
- self.assertFalse(snapshot == other)
-
- def test___eq___same_reference_different_data(self):
- snapshot = self._make_w_ref(("a", "b"))
- other = self._make_w_ref(("a", "b"), {"foo": "bar"})
- self.assertFalse(snapshot == other)
-
- def test___eq___same_reference_same_data(self):
- snapshot = self._make_w_ref(("a", "b"), {"foo": "bar"})
- other = self._make_w_ref(("a", "b"), {"foo": "bar"})
- self.assertTrue(snapshot == other)
-
- def test___hash__(self):
- from google.protobuf import timestamp_pb2
-
- client = mock.MagicMock()
- client.__hash__.return_value = 234566789
- reference = self._make_reference("hi", "bye", client=client)
- data = {"zoop": 83}
- update_time = timestamp_pb2.Timestamp(seconds=123456, nanos=123456789)
- snapshot = self._make_one(
- reference, data, True, None, mock.sentinel.create_time, update_time
- )
- self.assertEqual(
- hash(snapshot), hash(reference) + hash(123456) + hash(123456789)
- )
-
- def test__client_property(self):
- reference = self._make_reference(
- "ok", "fine", "now", "fore", client=mock.sentinel.client
- )
- snapshot = self._make_one(reference, {}, False, None, None, None)
- self.assertIs(snapshot._client, mock.sentinel.client)
-
- def test_exists_property(self):
- reference = mock.sentinel.reference
-
- snapshot1 = self._make_one(reference, {}, False, None, None, None)
- self.assertFalse(snapshot1.exists)
- snapshot2 = self._make_one(reference, {}, True, None, None, None)
- self.assertTrue(snapshot2.exists)
-
- def test_id_property(self):
- document_id = "around"
- reference = self._make_reference(
- "look", document_id, client=mock.sentinel.client
- )
- snapshot = self._make_one(reference, {}, True, None, None, None)
- self.assertEqual(snapshot.id, document_id)
- self.assertEqual(reference.id, document_id)
-
- def test_reference_property(self):
- snapshot = self._make_one(mock.sentinel.reference, {}, True, None, None, None)
- self.assertIs(snapshot.reference, mock.sentinel.reference)
-
- def test_get(self):
- data = {"one": {"bold": "move"}}
- snapshot = self._make_one(None, data, True, None, None, None)
-
- first_read = snapshot.get("one")
- second_read = snapshot.get("one")
- self.assertEqual(first_read, data.get("one"))
- self.assertIsNot(first_read, data.get("one"))
- self.assertEqual(first_read, second_read)
- self.assertIsNot(first_read, second_read)
-
- with self.assertRaises(KeyError):
- snapshot.get("two")
-
- def test_nonexistent_snapshot(self):
- snapshot = self._make_one(None, None, False, None, None, None)
- self.assertIsNone(snapshot.get("one"))
-
- def test_to_dict(self):
- data = {"a": 10, "b": ["definitely", "mutable"], "c": {"45": 50}}
- snapshot = self._make_one(None, data, True, None, None, None)
- as_dict = snapshot.to_dict()
- self.assertEqual(as_dict, data)
- self.assertIsNot(as_dict, data)
- # Check that the data remains unchanged.
- as_dict["b"].append("hi")
- self.assertEqual(data, snapshot.to_dict())
- self.assertNotEqual(data, as_dict)
-
- def test_non_existent(self):
- snapshot = self._make_one(None, None, False, None, None, None)
- as_dict = snapshot.to_dict()
- self.assertIsNone(as_dict)
-
-
-class Test__get_document_path(unittest.TestCase):
- @staticmethod
- def _call_fut(client, path):
- from google.cloud.firestore_v1.document import _get_document_path
-
- return _get_document_path(client, path)
-
- def test_it(self):
- project = "prah-jekt"
- client = _make_client(project=project)
- path = ("Some", "Document", "Child", "Shockument")
- document_path = self._call_fut(client, path)
-
- expected = "projects/{}/databases/{}/documents/{}".format(
- project, client._database, "/".join(path)
- )
- self.assertEqual(document_path, expected)
-
-
-class Test__consume_single_get(unittest.TestCase):
- @staticmethod
- def _call_fut(response_iterator):
- from google.cloud.firestore_v1.document import _consume_single_get
-
- return _consume_single_get(response_iterator)
-
- def test_success(self):
- response_iterator = iter([mock.sentinel.result])
- result = self._call_fut(response_iterator)
- self.assertIs(result, mock.sentinel.result)
-
- def test_failure_not_enough(self):
- response_iterator = iter([])
- with self.assertRaises(ValueError):
- self._call_fut(response_iterator)
-
- def test_failure_too_many(self):
- response_iterator = iter([None, None])
- with self.assertRaises(ValueError):
- self._call_fut(response_iterator)
-
-
-class Test__first_write_result(unittest.TestCase):
- @staticmethod
- def _call_fut(write_results):
- from google.cloud.firestore_v1.document import _first_write_result
-
- return _first_write_result(write_results)
-
- def test_success(self):
- from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1.proto import write_pb2
-
- single_result = write_pb2.WriteResult(
- update_time=timestamp_pb2.Timestamp(seconds=1368767504, nanos=458000123)
- )
- write_results = [single_result]
- result = self._call_fut(write_results)
- self.assertIs(result, single_result)
-
- def test_failure_not_enough(self):
- write_results = []
- with self.assertRaises(ValueError):
- self._call_fut(write_results)
-
- def test_more_than_one(self):
- from google.cloud.firestore_v1.proto import write_pb2
-
- result1 = write_pb2.WriteResult()
- result2 = write_pb2.WriteResult()
- write_results = [result1, result2]
- result = self._call_fut(write_results)
- self.assertIs(result, result1)
-
-
def _make_credentials():
import google.auth.credentials
diff --git a/tests/unit/v1/test_order.py b/tests/unit/v1/test_order.py
index e5327dbc60..90d99e563e 100644
--- a/tests/unit/v1/test_order.py
+++ b/tests/unit/v1/test_order.py
@@ -14,23 +14,18 @@
# limitations under the License.
import mock
-import six
import unittest
from google.cloud.firestore_v1._helpers import encode_value, GeoPoint
from google.cloud.firestore_v1.order import Order
from google.cloud.firestore_v1.order import TypeOrder
-from google.cloud.firestore_v1.proto import document_pb2
+from google.cloud.firestore_v1.types import document
from google.protobuf import timestamp_pb2
class TestOrder(unittest.TestCase):
-
- if six.PY2:
- assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
-
@staticmethod
def _get_target_class():
from google.cloud.firestore_v1.order import Order
@@ -188,7 +183,7 @@ def test_failure_to_find_type(self):
# expect this to fail with value error.
with mock.patch.object(TypeOrder, "from_value") as to:
to.value = None
- with self.assertRaisesRegex(ValueError, "'Unknown ``value_type``"):
+ with self.assertRaisesRegex(ValueError, "Unknown ``value_type``"):
target.compare(left, right)
def test_compare_objects_different_keys(self):
@@ -212,13 +207,11 @@ def _int_value(value):
def _string_value(s):
- if not isinstance(s, six.text_type):
- s = six.u(s)
return encode_value(s)
def _reference_value(r):
- return document_pb2.Value(reference_value=r)
+ return document.Value(reference_value=r)
def _blob_value(b):
@@ -230,7 +223,7 @@ def nullValue():
def _timestamp_value(seconds, nanos):
- return document_pb2.Value(
+ return document.Value(
timestamp_value=timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
)
diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py
index bdb0e922d0..91172b120b 100644
--- a/tests/unit/v1/test_query.py
+++ b/tests/unit/v1/test_query.py
@@ -12,19 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import datetime
import types
import unittest
import mock
-import six
+import pytest
+from tests.unit.v1.test_base_query import _make_credentials
+from tests.unit.v1.test_base_query import _make_cursor_pb
+from tests.unit.v1.test_base_query import _make_query_response
-class TestQuery(unittest.TestCase):
-
- if six.PY2:
- assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
+class TestQuery(unittest.TestCase):
@staticmethod
def _get_target_class():
from google.cloud.firestore_v1.query import Query
@@ -35,7 +34,7 @@ def _make_one(self, *args, **kwargs):
klass = self._get_target_class()
return klass(*args, **kwargs)
- def test_constructor_defaults(self):
+ def test_constructor(self):
query = self._make_one(mock.sentinel.parent)
self.assertIs(query._parent, mock.sentinel.parent)
self.assertIsNone(query._projection)
@@ -47,1023 +46,64 @@ def test_constructor_defaults(self):
self.assertIsNone(query._end_at)
self.assertFalse(query._all_descendants)
- def _make_one_all_fields(
- self, limit=9876, offset=12, skip_fields=(), parent=None, all_descendants=True
- ):
- kwargs = {
- "projection": mock.sentinel.projection,
- "field_filters": mock.sentinel.filters,
- "orders": mock.sentinel.orders,
- "limit": limit,
- "offset": offset,
- "start_at": mock.sentinel.start_at,
- "end_at": mock.sentinel.end_at,
- "all_descendants": all_descendants,
- }
- for field in skip_fields:
- kwargs.pop(field)
- if parent is None:
- parent = mock.sentinel.parent
- return self._make_one(parent, **kwargs)
-
- def test_constructor_explicit(self):
- limit = 234
- offset = 56
- query = self._make_one_all_fields(limit=limit, offset=offset)
- self.assertIs(query._parent, mock.sentinel.parent)
- self.assertIs(query._projection, mock.sentinel.projection)
- self.assertIs(query._field_filters, mock.sentinel.filters)
- self.assertEqual(query._orders, mock.sentinel.orders)
- self.assertEqual(query._limit, limit)
- self.assertEqual(query._offset, offset)
- self.assertIs(query._start_at, mock.sentinel.start_at)
- self.assertIs(query._end_at, mock.sentinel.end_at)
- self.assertTrue(query._all_descendants)
-
- def test__client_property(self):
- parent = mock.Mock(_client=mock.sentinel.client, spec=["_client"])
- query = self._make_one(parent)
- self.assertIs(query._client, mock.sentinel.client)
-
- def test___eq___other_type(self):
- query = self._make_one_all_fields()
- other = object()
- self.assertFalse(query == other)
-
- def test___eq___different_parent(self):
- parent = mock.sentinel.parent
- other_parent = mock.sentinel.other_parent
- query = self._make_one_all_fields(parent=parent)
- other = self._make_one_all_fields(parent=other_parent)
- self.assertFalse(query == other)
-
- def test___eq___different_projection(self):
- parent = mock.sentinel.parent
- query = self._make_one_all_fields(parent=parent, skip_fields=("projection",))
- query._projection = mock.sentinel.projection
- other = self._make_one_all_fields(parent=parent, skip_fields=("projection",))
- other._projection = mock.sentinel.other_projection
- self.assertFalse(query == other)
-
- def test___eq___different_field_filters(self):
- parent = mock.sentinel.parent
- query = self._make_one_all_fields(parent=parent, skip_fields=("field_filters",))
- query._field_filters = mock.sentinel.field_filters
- other = self._make_one_all_fields(parent=parent, skip_fields=("field_filters",))
- other._field_filters = mock.sentinel.other_field_filters
- self.assertFalse(query == other)
-
- def test___eq___different_orders(self):
- parent = mock.sentinel.parent
- query = self._make_one_all_fields(parent=parent, skip_fields=("orders",))
- query._orders = mock.sentinel.orders
- other = self._make_one_all_fields(parent=parent, skip_fields=("orders",))
- other._orders = mock.sentinel.other_orders
- self.assertFalse(query == other)
-
- def test___eq___different_limit(self):
- parent = mock.sentinel.parent
- query = self._make_one_all_fields(parent=parent, limit=10)
- other = self._make_one_all_fields(parent=parent, limit=20)
- self.assertFalse(query == other)
-
- def test___eq___different_offset(self):
- parent = mock.sentinel.parent
- query = self._make_one_all_fields(parent=parent, offset=10)
- other = self._make_one_all_fields(parent=parent, offset=20)
- self.assertFalse(query == other)
-
- def test___eq___different_start_at(self):
- parent = mock.sentinel.parent
- query = self._make_one_all_fields(parent=parent, skip_fields=("start_at",))
- query._start_at = mock.sentinel.start_at
- other = self._make_one_all_fields(parent=parent, skip_fields=("start_at",))
- other._start_at = mock.sentinel.other_start_at
- self.assertFalse(query == other)
-
- def test___eq___different_end_at(self):
- parent = mock.sentinel.parent
- query = self._make_one_all_fields(parent=parent, skip_fields=("end_at",))
- query._end_at = mock.sentinel.end_at
- other = self._make_one_all_fields(parent=parent, skip_fields=("end_at",))
- other._end_at = mock.sentinel.other_end_at
- self.assertFalse(query == other)
-
- def test___eq___different_all_descendants(self):
- parent = mock.sentinel.parent
- query = self._make_one_all_fields(parent=parent, all_descendants=True)
- other = self._make_one_all_fields(parent=parent, all_descendants=False)
- self.assertFalse(query == other)
-
- def test___eq___hit(self):
- query = self._make_one_all_fields()
- other = self._make_one_all_fields()
- self.assertTrue(query == other)
-
- def _compare_queries(self, query1, query2, attr_name):
- attrs1 = query1.__dict__.copy()
- attrs2 = query2.__dict__.copy()
-
- attrs1.pop(attr_name)
- attrs2.pop(attr_name)
-
- # The only different should be in ``attr_name``.
- self.assertEqual(len(attrs1), len(attrs2))
- for key, value in attrs1.items():
- self.assertIs(value, attrs2[key])
-
- @staticmethod
- def _make_projection_for_select(field_paths):
- from google.cloud.firestore_v1.proto import query_pb2
-
- return query_pb2.StructuredQuery.Projection(
- fields=[
- query_pb2.StructuredQuery.FieldReference(field_path=field_path)
- for field_path in field_paths
- ]
- )
-
- def test_select_invalid_path(self):
- query = self._make_one(mock.sentinel.parent)
-
- with self.assertRaises(ValueError):
- query.select(["*"])
-
- def test_select(self):
- query1 = self._make_one_all_fields(all_descendants=True)
-
- field_paths2 = ["foo", "bar"]
- query2 = query1.select(field_paths2)
- self.assertIsNot(query2, query1)
- self.assertIsInstance(query2, self._get_target_class())
- self.assertEqual(
- query2._projection, self._make_projection_for_select(field_paths2)
- )
- self._compare_queries(query1, query2, "_projection")
-
- # Make sure it overrides.
- field_paths3 = ["foo.baz"]
- query3 = query2.select(field_paths3)
- self.assertIsNot(query3, query2)
- self.assertIsInstance(query3, self._get_target_class())
- self.assertEqual(
- query3._projection, self._make_projection_for_select(field_paths3)
- )
- self._compare_queries(query2, query3, "_projection")
-
- def test_where_invalid_path(self):
- query = self._make_one(mock.sentinel.parent)
-
- with self.assertRaises(ValueError):
- query.where("*", "==", 1)
-
- def test_where(self):
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import query_pb2
-
- query = self._make_one_all_fields(
- skip_fields=("field_filters",), all_descendants=True
- )
- new_query = query.where("power.level", ">", 9000)
-
- self.assertIsNot(query, new_query)
- self.assertIsInstance(new_query, self._get_target_class())
- self.assertEqual(len(new_query._field_filters), 1)
-
- field_pb = new_query._field_filters[0]
- expected_pb = query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="power.level"),
- op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN,
- value=document_pb2.Value(integer_value=9000),
- )
- self.assertEqual(field_pb, expected_pb)
- self._compare_queries(query, new_query, "_field_filters")
-
- def _where_unary_helper(self, value, op_enum, op_string="=="):
- from google.cloud.firestore_v1.proto import query_pb2
-
- query = self._make_one_all_fields(skip_fields=("field_filters",))
- field_path = "feeeld"
- new_query = query.where(field_path, op_string, value)
-
- self.assertIsNot(query, new_query)
- self.assertIsInstance(new_query, self._get_target_class())
- self.assertEqual(len(new_query._field_filters), 1)
-
- field_pb = new_query._field_filters[0]
- expected_pb = query_pb2.StructuredQuery.UnaryFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- op=op_enum,
- )
- self.assertEqual(field_pb, expected_pb)
- self._compare_queries(query, new_query, "_field_filters")
-
- def test_where_eq_null(self):
- from google.cloud.firestore_v1.gapic import enums
-
- op_enum = enums.StructuredQuery.UnaryFilter.Operator.IS_NULL
- self._where_unary_helper(None, op_enum)
-
- def test_where_gt_null(self):
- with self.assertRaises(ValueError):
- self._where_unary_helper(None, 0, op_string=">")
-
- def test_where_eq_nan(self):
- from google.cloud.firestore_v1.gapic import enums
-
- op_enum = enums.StructuredQuery.UnaryFilter.Operator.IS_NAN
- self._where_unary_helper(float("nan"), op_enum)
-
- def test_where_le_nan(self):
- with self.assertRaises(ValueError):
- self._where_unary_helper(float("nan"), 0, op_string="<=")
-
- def test_where_w_delete(self):
- from google.cloud.firestore_v1 import DELETE_FIELD
-
- with self.assertRaises(ValueError):
- self._where_unary_helper(DELETE_FIELD, 0)
-
- def test_where_w_server_timestamp(self):
- from google.cloud.firestore_v1 import SERVER_TIMESTAMP
-
- with self.assertRaises(ValueError):
- self._where_unary_helper(SERVER_TIMESTAMP, 0)
-
- def test_where_w_array_remove(self):
- from google.cloud.firestore_v1 import ArrayRemove
-
- with self.assertRaises(ValueError):
- self._where_unary_helper(ArrayRemove([1, 3, 5]), 0)
-
- def test_where_w_array_union(self):
- from google.cloud.firestore_v1 import ArrayUnion
-
- with self.assertRaises(ValueError):
- self._where_unary_helper(ArrayUnion([2, 4, 8]), 0)
-
- def test_order_by_invalid_path(self):
- query = self._make_one(mock.sentinel.parent)
-
- with self.assertRaises(ValueError):
- query.order_by("*")
-
- def test_order_by(self):
- from google.cloud.firestore_v1.gapic import enums
-
- klass = self._get_target_class()
- query1 = self._make_one_all_fields(
- skip_fields=("orders",), all_descendants=True
- )
-
- field_path2 = "a"
- query2 = query1.order_by(field_path2)
- self.assertIsNot(query2, query1)
- self.assertIsInstance(query2, klass)
- order_pb2 = _make_order_pb(
- field_path2, enums.StructuredQuery.Direction.ASCENDING
- )
- self.assertEqual(query2._orders, (order_pb2,))
- self._compare_queries(query1, query2, "_orders")
-
- # Make sure it appends to the orders.
- field_path3 = "b"
- query3 = query2.order_by(field_path3, direction=klass.DESCENDING)
- self.assertIsNot(query3, query2)
- self.assertIsInstance(query3, klass)
- order_pb3 = _make_order_pb(
- field_path3, enums.StructuredQuery.Direction.DESCENDING
- )
- self.assertEqual(query3._orders, (order_pb2, order_pb3))
- self._compare_queries(query2, query3, "_orders")
-
- def test_limit(self):
- query1 = self._make_one_all_fields(all_descendants=True)
-
- limit2 = 100
- query2 = query1.limit(limit2)
- self.assertIsNot(query2, query1)
- self.assertIsInstance(query2, self._get_target_class())
- self.assertEqual(query2._limit, limit2)
- self._compare_queries(query1, query2, "_limit")
-
- # Make sure it overrides.
- limit3 = 10
- query3 = query2.limit(limit3)
- self.assertIsNot(query3, query2)
- self.assertIsInstance(query3, self._get_target_class())
- self.assertEqual(query3._limit, limit3)
- self._compare_queries(query2, query3, "_limit")
-
- def test_offset(self):
- query1 = self._make_one_all_fields(all_descendants=True)
-
- offset2 = 23
- query2 = query1.offset(offset2)
- self.assertIsNot(query2, query1)
- self.assertIsInstance(query2, self._get_target_class())
- self.assertEqual(query2._offset, offset2)
- self._compare_queries(query1, query2, "_offset")
-
- # Make sure it overrides.
- offset3 = 35
- query3 = query2.offset(offset3)
- self.assertIsNot(query3, query2)
- self.assertIsInstance(query3, self._get_target_class())
- self.assertEqual(query3._offset, offset3)
- self._compare_queries(query2, query3, "_offset")
-
- @staticmethod
- def _make_collection(*path, **kw):
- from google.cloud.firestore_v1 import collection
+ def _get_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
- return collection.CollectionReference(*path, **kw)
+ # Create a minimal fake GAPIC.
+ firestore_api = mock.Mock(spec=["run_query"])
- @staticmethod
- def _make_docref(*path, **kw):
- from google.cloud.firestore_v1 import document
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
- return document.DocumentReference(*path, **kw)
+ # Make a **real** collection reference as parent.
+ parent = client.collection("dee")
- @staticmethod
- def _make_snapshot(docref, values):
- from google.cloud.firestore_v1 import document
-
- return document.DocumentSnapshot(docref, values, True, None, None, None)
-
- def test__cursor_helper_w_dict(self):
- values = {"a": 7, "b": "foo"}
- query1 = self._make_one(mock.sentinel.parent)
- query1._all_descendants = True
- query2 = query1._cursor_helper(values, True, True)
-
- self.assertIs(query2._parent, mock.sentinel.parent)
- self.assertIsNone(query2._projection)
- self.assertEqual(query2._field_filters, ())
- self.assertEqual(query2._orders, query1._orders)
- self.assertIsNone(query2._limit)
- self.assertIsNone(query2._offset)
- self.assertIsNone(query2._end_at)
- self.assertTrue(query2._all_descendants)
-
- cursor, before = query2._start_at
-
- self.assertEqual(cursor, values)
- self.assertTrue(before)
-
- def test__cursor_helper_w_tuple(self):
- values = (7, "foo")
- query1 = self._make_one(mock.sentinel.parent)
- query2 = query1._cursor_helper(values, False, True)
-
- self.assertIs(query2._parent, mock.sentinel.parent)
- self.assertIsNone(query2._projection)
- self.assertEqual(query2._field_filters, ())
- self.assertEqual(query2._orders, query1._orders)
- self.assertIsNone(query2._limit)
- self.assertIsNone(query2._offset)
- self.assertIsNone(query2._end_at)
-
- cursor, before = query2._start_at
-
- self.assertEqual(cursor, list(values))
- self.assertFalse(before)
-
- def test__cursor_helper_w_list(self):
- values = [7, "foo"]
- query1 = self._make_one(mock.sentinel.parent)
- query2 = query1._cursor_helper(values, True, False)
-
- self.assertIs(query2._parent, mock.sentinel.parent)
- self.assertIsNone(query2._projection)
- self.assertEqual(query2._field_filters, ())
- self.assertEqual(query2._orders, query1._orders)
- self.assertIsNone(query2._limit)
- self.assertIsNone(query2._offset)
- self.assertIsNone(query2._start_at)
-
- cursor, before = query2._end_at
-
- self.assertEqual(cursor, values)
- self.assertIsNot(cursor, values)
- self.assertTrue(before)
-
- def test__cursor_helper_w_snapshot_wrong_collection(self):
- values = {"a": 7, "b": "foo"}
- docref = self._make_docref("there", "doc_id")
- snapshot = self._make_snapshot(docref, values)
- collection = self._make_collection("here")
- query = self._make_one(collection)
+ # Add a dummy response to the minimal fake GAPIC.
+ _, expected_prefix = parent._parent_info()
+ name = "{}/sleep".format(expected_prefix)
+ data = {"snooze": 10}
- with self.assertRaises(ValueError):
- query._cursor_helper(snapshot, False, False)
-
- def test__cursor_helper_w_snapshot_other_collection_all_descendants(self):
- values = {"a": 7, "b": "foo"}
- docref = self._make_docref("there", "doc_id")
- snapshot = self._make_snapshot(docref, values)
- collection = self._make_collection("here")
- query1 = self._make_one(collection, all_descendants=True)
-
- query2 = query1._cursor_helper(snapshot, False, False)
-
- self.assertIs(query2._parent, collection)
- self.assertIsNone(query2._projection)
- self.assertEqual(query2._field_filters, ())
- self.assertEqual(query2._orders, ())
- self.assertIsNone(query2._limit)
- self.assertIsNone(query2._offset)
- self.assertIsNone(query2._start_at)
-
- cursor, before = query2._end_at
-
- self.assertIs(cursor, snapshot)
- self.assertFalse(before)
-
- def test__cursor_helper_w_snapshot(self):
- values = {"a": 7, "b": "foo"}
- docref = self._make_docref("here", "doc_id")
- snapshot = self._make_snapshot(docref, values)
- collection = self._make_collection("here")
- query1 = self._make_one(collection)
-
- query2 = query1._cursor_helper(snapshot, False, False)
-
- self.assertIs(query2._parent, collection)
- self.assertIsNone(query2._projection)
- self.assertEqual(query2._field_filters, ())
- self.assertEqual(query2._orders, ())
- self.assertIsNone(query2._limit)
- self.assertIsNone(query2._offset)
- self.assertIsNone(query2._start_at)
-
- cursor, before = query2._end_at
-
- self.assertIs(cursor, snapshot)
- self.assertFalse(before)
-
- def test_start_at(self):
- collection = self._make_collection("here")
- query1 = self._make_one_all_fields(
- parent=collection, skip_fields=("orders",), all_descendants=True
- )
- query2 = query1.order_by("hi")
-
- document_fields3 = {"hi": "mom"}
- query3 = query2.start_at(document_fields3)
- self.assertIsNot(query3, query2)
- self.assertIsInstance(query3, self._get_target_class())
- self.assertEqual(query3._start_at, (document_fields3, True))
- self._compare_queries(query2, query3, "_start_at")
-
- # Make sure it overrides.
- query4 = query3.order_by("bye")
- values5 = {"hi": "zap", "bye": 88}
- docref = self._make_docref("here", "doc_id")
- document_fields5 = self._make_snapshot(docref, values5)
- query5 = query4.start_at(document_fields5)
- self.assertIsNot(query5, query4)
- self.assertIsInstance(query5, self._get_target_class())
- self.assertEqual(query5._start_at, (document_fields5, True))
- self._compare_queries(query4, query5, "_start_at")
-
- def test_start_after(self):
- collection = self._make_collection("here")
- query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",))
- query2 = query1.order_by("down")
-
- document_fields3 = {"down": 99.75}
- query3 = query2.start_after(document_fields3)
- self.assertIsNot(query3, query2)
- self.assertIsInstance(query3, self._get_target_class())
- self.assertEqual(query3._start_at, (document_fields3, False))
- self._compare_queries(query2, query3, "_start_at")
-
- # Make sure it overrides.
- query4 = query3.order_by("out")
- values5 = {"down": 100.25, "out": b"\x00\x01"}
- docref = self._make_docref("here", "doc_id")
- document_fields5 = self._make_snapshot(docref, values5)
- query5 = query4.start_after(document_fields5)
- self.assertIsNot(query5, query4)
- self.assertIsInstance(query5, self._get_target_class())
- self.assertEqual(query5._start_at, (document_fields5, False))
- self._compare_queries(query4, query5, "_start_at")
-
- def test_end_before(self):
- collection = self._make_collection("here")
- query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",))
- query2 = query1.order_by("down")
-
- document_fields3 = {"down": 99.75}
- query3 = query2.end_before(document_fields3)
- self.assertIsNot(query3, query2)
- self.assertIsInstance(query3, self._get_target_class())
- self.assertEqual(query3._end_at, (document_fields3, True))
- self._compare_queries(query2, query3, "_end_at")
-
- # Make sure it overrides.
- query4 = query3.order_by("out")
- values5 = {"down": 100.25, "out": b"\x00\x01"}
- docref = self._make_docref("here", "doc_id")
- document_fields5 = self._make_snapshot(docref, values5)
- query5 = query4.end_before(document_fields5)
- self.assertIsNot(query5, query4)
- self.assertIsInstance(query5, self._get_target_class())
- self.assertEqual(query5._end_at, (document_fields5, True))
- self._compare_queries(query4, query5, "_end_at")
- self._compare_queries(query4, query5, "_end_at")
-
- def test_end_at(self):
- collection = self._make_collection("here")
- query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",))
- query2 = query1.order_by("hi")
-
- document_fields3 = {"hi": "mom"}
- query3 = query2.end_at(document_fields3)
- self.assertIsNot(query3, query2)
- self.assertIsInstance(query3, self._get_target_class())
- self.assertEqual(query3._end_at, (document_fields3, False))
- self._compare_queries(query2, query3, "_end_at")
-
- # Make sure it overrides.
- query4 = query3.order_by("bye")
- values5 = {"hi": "zap", "bye": 88}
- docref = self._make_docref("here", "doc_id")
- document_fields5 = self._make_snapshot(docref, values5)
- query5 = query4.end_at(document_fields5)
- self.assertIsNot(query5, query4)
- self.assertIsInstance(query5, self._get_target_class())
- self.assertEqual(query5._end_at, (document_fields5, False))
- self._compare_queries(query4, query5, "_end_at")
-
- def test__filters_pb_empty(self):
- query = self._make_one(mock.sentinel.parent)
- self.assertEqual(len(query._field_filters), 0)
- self.assertIsNone(query._filters_pb())
-
- def test__filters_pb_single(self):
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import query_pb2
-
- query1 = self._make_one(mock.sentinel.parent)
- query2 = query1.where("x.y", ">", 50.5)
- filter_pb = query2._filters_pb()
- expected_pb = query_pb2.StructuredQuery.Filter(
- field_filter=query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="x.y"),
- op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN,
- value=document_pb2.Value(double_value=50.5),
- )
- )
- self.assertEqual(filter_pb, expected_pb)
-
- def test__filters_pb_multi(self):
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import query_pb2
-
- query1 = self._make_one(mock.sentinel.parent)
- query2 = query1.where("x.y", ">", 50.5)
- query3 = query2.where("ABC", "==", 123)
-
- filter_pb = query3._filters_pb()
- op_class = enums.StructuredQuery.FieldFilter.Operator
- expected_pb = query_pb2.StructuredQuery.Filter(
- composite_filter=query_pb2.StructuredQuery.CompositeFilter(
- op=enums.StructuredQuery.CompositeFilter.Operator.AND,
- filters=[
- query_pb2.StructuredQuery.Filter(
- field_filter=query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(
- field_path="x.y"
- ),
- op=op_class.GREATER_THAN,
- value=document_pb2.Value(double_value=50.5),
- )
- ),
- query_pb2.StructuredQuery.Filter(
- field_filter=query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(
- field_path="ABC"
- ),
- op=op_class.EQUAL,
- value=document_pb2.Value(integer_value=123),
- )
- ),
- ],
- )
- )
- self.assertEqual(filter_pb, expected_pb)
+ response_pb = _make_query_response(name=name, data=data)
+ firestore_api.run_query.return_value = iter([response_pb])
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
- def test__normalize_projection_none(self):
- query = self._make_one(mock.sentinel.parent)
- self.assertIsNone(query._normalize_projection(None))
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+ returned = query.get(**kwargs)
- def test__normalize_projection_empty(self):
- projection = self._make_projection_for_select([])
- query = self._make_one(mock.sentinel.parent)
- normalized = query._normalize_projection(projection)
- field_paths = [field_ref.field_path for field_ref in normalized.fields]
- self.assertEqual(field_paths, ["__name__"])
+ self.assertIsInstance(returned, list)
+ self.assertEqual(len(returned), 1)
- def test__normalize_projection_non_empty(self):
- projection = self._make_projection_for_select(["a", "b"])
- query = self._make_one(mock.sentinel.parent)
- self.assertIs(query._normalize_projection(projection), projection)
+ snapshot = returned[0]
+ self.assertEqual(snapshot.reference._path, ("dee", "sleep"))
+ self.assertEqual(snapshot.to_dict(), data)
- def test__normalize_orders_wo_orders_wo_cursors(self):
- query = self._make_one(mock.sentinel.parent)
- expected = []
- self.assertEqual(query._normalize_orders(), expected)
-
- def test__normalize_orders_w_orders_wo_cursors(self):
- query = self._make_one(mock.sentinel.parent).order_by("a")
- expected = [query._make_order("a", "ASCENDING")]
- self.assertEqual(query._normalize_orders(), expected)
-
- def test__normalize_orders_wo_orders_w_snapshot_cursor(self):
- values = {"a": 7, "b": "foo"}
- docref = self._make_docref("here", "doc_id")
- snapshot = self._make_snapshot(docref, values)
- collection = self._make_collection("here")
- query = self._make_one(collection).start_at(snapshot)
- expected = [query._make_order("__name__", "ASCENDING")]
- self.assertEqual(query._normalize_orders(), expected)
-
- def test__normalize_orders_w_name_orders_w_snapshot_cursor(self):
- values = {"a": 7, "b": "foo"}
- docref = self._make_docref("here", "doc_id")
- snapshot = self._make_snapshot(docref, values)
- collection = self._make_collection("here")
- query = (
- self._make_one(collection)
- .order_by("__name__", "DESCENDING")
- .start_at(snapshot)
- )
- expected = [query._make_order("__name__", "DESCENDING")]
- self.assertEqual(query._normalize_orders(), expected)
-
- def test__normalize_orders_wo_orders_w_snapshot_cursor_w_neq_exists(self):
- values = {"a": 7, "b": "foo"}
- docref = self._make_docref("here", "doc_id")
- snapshot = self._make_snapshot(docref, values)
- collection = self._make_collection("here")
- query = (
- self._make_one(collection)
- .where("c", "<=", 20)
- .order_by("c", "DESCENDING")
- .start_at(snapshot)
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ firestore_api.run_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
)
- expected = [
- query._make_order("c", "DESCENDING"),
- query._make_order("__name__", "DESCENDING"),
- ]
- self.assertEqual(query._normalize_orders(), expected)
-
- def test__normalize_orders_wo_orders_w_snapshot_cursor_w_neq_where(self):
- values = {"a": 7, "b": "foo"}
- docref = self._make_docref("here", "doc_id")
- snapshot = self._make_snapshot(docref, values)
- collection = self._make_collection("here")
- query = self._make_one(collection).where("c", "<=", 20).end_at(snapshot)
- expected = [
- query._make_order("c", "ASCENDING"),
- query._make_order("__name__", "ASCENDING"),
- ]
- self.assertEqual(query._normalize_orders(), expected)
-
- def test__normalize_cursor_none(self):
- query = self._make_one(mock.sentinel.parent)
- self.assertIsNone(query._normalize_cursor(None, query._orders))
-
- def test__normalize_cursor_no_order(self):
- cursor = ([1], True)
- query = self._make_one(mock.sentinel.parent)
-
- with self.assertRaises(ValueError):
- query._normalize_cursor(cursor, query._orders)
-
- def test__normalize_cursor_as_list_mismatched_order(self):
- cursor = ([1, 2], True)
- query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
-
- with self.assertRaises(ValueError):
- query._normalize_cursor(cursor, query._orders)
-
- def test__normalize_cursor_as_dict_mismatched_order(self):
- cursor = ({"a": 1}, True)
- query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
-
- with self.assertRaises(ValueError):
- query._normalize_cursor(cursor, query._orders)
-
- def test__normalize_cursor_w_delete(self):
- from google.cloud.firestore_v1 import DELETE_FIELD
-
- cursor = ([DELETE_FIELD], True)
- query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
-
- with self.assertRaises(ValueError):
- query._normalize_cursor(cursor, query._orders)
- def test__normalize_cursor_w_server_timestamp(self):
- from google.cloud.firestore_v1 import SERVER_TIMESTAMP
+ def test_get(self):
+ self._get_helper()
- cursor = ([SERVER_TIMESTAMP], True)
- query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
+ def test_get_w_retry_timeout(self):
+ from google.api_core.retry import Retry
- with self.assertRaises(ValueError):
- query._normalize_cursor(cursor, query._orders)
-
- def test__normalize_cursor_w_array_remove(self):
- from google.cloud.firestore_v1 import ArrayRemove
-
- cursor = ([ArrayRemove([1, 3, 5])], True)
- query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
-
- with self.assertRaises(ValueError):
- query._normalize_cursor(cursor, query._orders)
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._get_helper(retry=retry, timeout=timeout)
- def test__normalize_cursor_w_array_union(self):
- from google.cloud.firestore_v1 import ArrayUnion
-
- cursor = ([ArrayUnion([2, 4, 8])], True)
- query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
-
- with self.assertRaises(ValueError):
- query._normalize_cursor(cursor, query._orders)
-
- def test__normalize_cursor_as_list_hit(self):
- cursor = ([1], True)
- query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
-
- self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))
-
- def test__normalize_cursor_as_dict_hit(self):
- cursor = ({"b": 1}, True)
- query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING")
-
- self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))
-
- def test__normalize_cursor_as_dict_with_dot_key_hit(self):
- cursor = ({"b.a": 1}, True)
- query = self._make_one(mock.sentinel.parent).order_by("b.a", "ASCENDING")
- self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))
-
- def test__normalize_cursor_as_dict_with_inner_data_hit(self):
- cursor = ({"b": {"a": 1}}, True)
- query = self._make_one(mock.sentinel.parent).order_by("b.a", "ASCENDING")
- self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))
-
- def test__normalize_cursor_as_snapshot_hit(self):
- values = {"b": 1}
- docref = self._make_docref("here", "doc_id")
- snapshot = self._make_snapshot(docref, values)
- cursor = (snapshot, True)
- collection = self._make_collection("here")
- query = self._make_one(collection).order_by("b", "ASCENDING")
-
- self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))
-
- def test__normalize_cursor_w___name___w_reference(self):
- db_string = "projects/my-project/database/(default)"
- client = mock.Mock(spec=["_database_string"])
- client._database_string = db_string
- parent = mock.Mock(spec=["_path", "_client"])
- parent._client = client
- parent._path = ["C"]
- query = self._make_one(parent).order_by("__name__", "ASCENDING")
- docref = self._make_docref("here", "doc_id")
- values = {"a": 7}
- snapshot = self._make_snapshot(docref, values)
- expected = docref
- cursor = (snapshot, True)
-
- self.assertEqual(
- query._normalize_cursor(cursor, query._orders), ([expected], True)
- )
-
- def test__normalize_cursor_w___name___wo_slash(self):
- db_string = "projects/my-project/database/(default)"
- client = mock.Mock(spec=["_database_string"])
- client._database_string = db_string
- parent = mock.Mock(spec=["_path", "_client", "document"])
- parent._client = client
- parent._path = ["C"]
- document = parent.document.return_value = mock.Mock(spec=[])
- query = self._make_one(parent).order_by("__name__", "ASCENDING")
- cursor = (["b"], True)
- expected = document
-
- self.assertEqual(
- query._normalize_cursor(cursor, query._orders), ([expected], True)
- )
- parent.document.assert_called_once_with("b")
-
- def test__to_protobuf_all_fields(self):
- from google.protobuf import wrappers_pb2
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import query_pb2
-
- parent = mock.Mock(id="cat", spec=["id"])
- query1 = self._make_one(parent)
- query2 = query1.select(["X", "Y", "Z"])
- query3 = query2.where("Y", ">", 2.5)
- query4 = query3.order_by("X")
- query5 = query4.limit(17)
- query6 = query5.offset(3)
- query7 = query6.start_at({"X": 10})
- query8 = query7.end_at({"X": 25})
-
- structured_query_pb = query8._to_protobuf()
- query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "select": query_pb2.StructuredQuery.Projection(
- fields=[
- query_pb2.StructuredQuery.FieldReference(field_path=field_path)
- for field_path in ["X", "Y", "Z"]
- ]
- ),
- "where": query_pb2.StructuredQuery.Filter(
- field_filter=query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="Y"),
- op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN,
- value=document_pb2.Value(double_value=2.5),
- )
- ),
- "order_by": [
- _make_order_pb("X", enums.StructuredQuery.Direction.ASCENDING)
- ],
- "start_at": query_pb2.Cursor(
- values=[document_pb2.Value(integer_value=10)], before=True
- ),
- "end_at": query_pb2.Cursor(values=[document_pb2.Value(integer_value=25)]),
- "offset": 3,
- "limit": wrappers_pb2.Int32Value(value=17),
- }
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
- self.assertEqual(structured_query_pb, expected_pb)
-
- def test__to_protobuf_select_only(self):
- from google.cloud.firestore_v1.proto import query_pb2
-
- parent = mock.Mock(id="cat", spec=["id"])
- query1 = self._make_one(parent)
- field_paths = ["a.b", "a.c", "d"]
- query2 = query1.select(field_paths)
-
- structured_query_pb = query2._to_protobuf()
- query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "select": query_pb2.StructuredQuery.Projection(
- fields=[
- query_pb2.StructuredQuery.FieldReference(field_path=field_path)
- for field_path in field_paths
- ]
- ),
- }
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
- self.assertEqual(structured_query_pb, expected_pb)
-
- def test__to_protobuf_where_only(self):
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import query_pb2
-
- parent = mock.Mock(id="dog", spec=["id"])
- query1 = self._make_one(parent)
- query2 = query1.where("a", "==", u"b")
-
- structured_query_pb = query2._to_protobuf()
- query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "where": query_pb2.StructuredQuery.Filter(
- field_filter=query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="a"),
- op=enums.StructuredQuery.FieldFilter.Operator.EQUAL,
- value=document_pb2.Value(string_value=u"b"),
- )
- ),
- }
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
- self.assertEqual(structured_query_pb, expected_pb)
-
- def test__to_protobuf_order_by_only(self):
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import query_pb2
-
- parent = mock.Mock(id="fish", spec=["id"])
- query1 = self._make_one(parent)
- query2 = query1.order_by("abc")
-
- structured_query_pb = query2._to_protobuf()
- query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "order_by": [
- _make_order_pb("abc", enums.StructuredQuery.Direction.ASCENDING)
- ],
- }
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
- self.assertEqual(structured_query_pb, expected_pb)
-
- def test__to_protobuf_start_at_only(self):
- # NOTE: "only" is wrong since we must have ``order_by`` as well.
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import query_pb2
-
- parent = mock.Mock(id="phish", spec=["id"])
- query = self._make_one(parent).order_by("X.Y").start_after({"X": {"Y": u"Z"}})
-
- structured_query_pb = query._to_protobuf()
- query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "order_by": [
- _make_order_pb("X.Y", enums.StructuredQuery.Direction.ASCENDING)
- ],
- "start_at": query_pb2.Cursor(
- values=[document_pb2.Value(string_value=u"Z")]
- ),
- }
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
- self.assertEqual(structured_query_pb, expected_pb)
-
- def test__to_protobuf_end_at_only(self):
- # NOTE: "only" is wrong since we must have ``order_by`` as well.
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import query_pb2
-
- parent = mock.Mock(id="ghoti", spec=["id"])
- query = self._make_one(parent).order_by("a").end_at({"a": 88})
-
- structured_query_pb = query._to_protobuf()
- query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "order_by": [
- _make_order_pb("a", enums.StructuredQuery.Direction.ASCENDING)
- ],
- "end_at": query_pb2.Cursor(values=[document_pb2.Value(integer_value=88)]),
- }
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
- self.assertEqual(structured_query_pb, expected_pb)
-
- def test__to_protobuf_offset_only(self):
- from google.cloud.firestore_v1.proto import query_pb2
-
- parent = mock.Mock(id="cartt", spec=["id"])
- query1 = self._make_one(parent)
- offset = 14
- query2 = query1.offset(offset)
-
- structured_query_pb = query2._to_protobuf()
- query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "offset": offset,
- }
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
- self.assertEqual(structured_query_pb, expected_pb)
-
- def test__to_protobuf_limit_only(self):
- from google.protobuf import wrappers_pb2
- from google.cloud.firestore_v1.proto import query_pb2
-
- parent = mock.Mock(id="donut", spec=["id"])
- query1 = self._make_one(parent)
- limit = 31
- query2 = query1.limit(limit)
-
- structured_query_pb = query2._to_protobuf()
- query_kwargs = {
- "from": [
- query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id)
- ],
- "limit": wrappers_pb2.Int32Value(value=limit),
- }
- expected_pb = query_pb2.StructuredQuery(**query_kwargs)
-
- self.assertEqual(structured_query_pb, expected_pb)
-
- def test_get_simple(self):
- import warnings
+ def test_get_limit_to_last(self):
+ from google.cloud import firestore
+ from google.cloud.firestore_v1.base_query import _enum_from_direction
# Create a minimal fake GAPIC.
firestore_api = mock.Mock(spec=["run_query"])
@@ -1079,36 +119,48 @@ def test_get_simple(self):
_, expected_prefix = parent._parent_info()
name = "{}/sleep".format(expected_prefix)
data = {"snooze": 10}
+ data2 = {"snooze": 20}
+
response_pb = _make_query_response(name=name, data=data)
- firestore_api.run_query.return_value = iter([response_pb])
+ response_pb2 = _make_query_response(name=name, data=data2)
+
+ firestore_api.run_query.return_value = iter([response_pb2, response_pb])
# Execute the query and check the response.
query = self._make_one(parent)
+ query = query.order_by(
+ "snooze", direction=firestore.Query.DESCENDING
+ ).limit_to_last(2)
+ returned = query.get()
- with warnings.catch_warnings(record=True) as warned:
- get_response = query.get()
+ self.assertIsInstance(returned, list)
+ self.assertEqual(
+ query._orders[0].direction, _enum_from_direction(firestore.Query.ASCENDING)
+ )
+ self.assertEqual(len(returned), 2)
- self.assertIsInstance(get_response, types.GeneratorType)
- returned = list(get_response)
- self.assertEqual(len(returned), 1)
snapshot = returned[0]
self.assertEqual(snapshot.reference._path, ("dee", "sleep"))
self.assertEqual(snapshot.to_dict(), data)
+ snapshot2 = returned[1]
+ self.assertEqual(snapshot2.reference._path, ("dee", "sleep"))
+ self.assertEqual(snapshot2.to_dict(), data2)
+
# Verify the mock call.
parent_path, _ = parent._parent_info()
firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
)
- # Verify the deprecation
- self.assertEqual(len(warned), 1)
- self.assertIs(warned[0].category, DeprecationWarning)
+ def _stream_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
- def test_stream_simple(self):
# Create a minimal fake GAPIC.
firestore_api = mock.Mock(spec=["run_query"])
@@ -1125,10 +177,13 @@ def test_stream_simple(self):
data = {"snooze": 10}
response_pb = _make_query_response(name=name, data=data)
firestore_api.run_query.return_value = iter([response_pb])
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
# Execute the query and check the response.
query = self._make_one(parent)
- get_response = query.stream()
+
+ get_response = query.stream(**kwargs)
+
self.assertIsInstance(get_response, types.GeneratorType)
returned = list(get_response)
self.assertEqual(len(returned), 1)
@@ -1139,12 +194,39 @@ def test_stream_simple(self):
# Verify the mock call.
parent_path, _ = parent._parent_info()
firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
+ **kwargs,
)
+ def test_stream_simple(self):
+ self._stream_helper()
+
+ def test_stream_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._stream_helper(retry=retry, timeout=timeout)
+
+ def test_stream_with_limit_to_last(self):
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ # Make a **real** collection reference as parent.
+ parent = client.collection("dee")
+ # Execute the query and check the response.
+ query = self._make_one(parent)
+ query = query.limit_to_last(2)
+
+ stream_response = query.stream()
+
+ with self.assertRaises(ValueError):
+ list(stream_response)
+
def test_stream_with_transaction(self):
# Create a minimal fake GAPIC.
firestore_api = mock.Mock(spec=["run_query"])
@@ -1180,9 +262,11 @@ def test_stream_with_transaction(self):
# Verify the mock call.
firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=txn_id,
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
@@ -1208,9 +292,11 @@ def test_stream_no_results(self):
# Verify the mock call.
parent_path, _ = parent._parent_info()
firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
)
@@ -1237,9 +323,11 @@ def test_stream_second_response_in_empty_stream(self):
# Verify the mock call.
parent_path, _ = parent._parent_info()
firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
)
@@ -1275,9 +363,11 @@ def test_stream_with_skipped_results(self):
# Verify the mock call.
parent_path, _ = parent._parent_info()
firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
)
@@ -1313,9 +403,11 @@ def test_stream_empty_after_first_response(self):
# Verify the mock call.
parent_path, _ = parent._parent_info()
firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
)
@@ -1354,9 +446,11 @@ def test_stream_w_collection_group(self):
# Verify the mock call.
parent_path, _ = parent._parent_info()
firestore_api.run_query.assert_called_once_with(
- parent_path,
- query._to_protobuf(),
- transaction=None,
+ request={
+ "parent": parent_path,
+ "structured_query": query._to_protobuf(),
+ "transaction": None,
+ },
metadata=client._rpc_metadata,
)
@@ -1366,340 +460,130 @@ def test_on_snapshot(self, watch):
query.on_snapshot(None)
watch.for_query.assert_called_once()
- def test_comparator_no_ordering(self):
- query = self._make_one(mock.sentinel.parent)
- query._orders = []
- doc1 = mock.Mock()
- doc1.reference._path = ("col", "adocument1")
-
- doc2 = mock.Mock()
- doc2.reference._path = ("col", "adocument2")
-
- sort = query._comparator(doc1, doc2)
- self.assertEqual(sort, -1)
-
- def test_comparator_no_ordering_same_id(self):
- query = self._make_one(mock.sentinel.parent)
- query._orders = []
- doc1 = mock.Mock()
- doc1.reference._path = ("col", "adocument1")
-
- doc2 = mock.Mock()
- doc2.reference._path = ("col", "adocument1")
-
- sort = query._comparator(doc1, doc2)
- self.assertEqual(sort, 0)
-
- def test_comparator_ordering(self):
- query = self._make_one(mock.sentinel.parent)
- orderByMock = mock.Mock()
- orderByMock.field.field_path = "last"
- orderByMock.direction = 1 # ascending
- query._orders = [orderByMock]
-
- doc1 = mock.Mock()
- doc1.reference._path = ("col", "adocument1")
- doc1._data = {
- "first": {"stringValue": "Ada"},
- "last": {"stringValue": "secondlovelace"},
- }
- doc2 = mock.Mock()
- doc2.reference._path = ("col", "adocument2")
- doc2._data = {
- "first": {"stringValue": "Ada"},
- "last": {"stringValue": "lovelace"},
- }
-
- sort = query._comparator(doc1, doc2)
- self.assertEqual(sort, 1)
-
- def test_comparator_ordering_descending(self):
- query = self._make_one(mock.sentinel.parent)
- orderByMock = mock.Mock()
- orderByMock.field.field_path = "last"
- orderByMock.direction = -1 # descending
- query._orders = [orderByMock]
-
- doc1 = mock.Mock()
- doc1.reference._path = ("col", "adocument1")
- doc1._data = {
- "first": {"stringValue": "Ada"},
- "last": {"stringValue": "secondlovelace"},
- }
- doc2 = mock.Mock()
- doc2.reference._path = ("col", "adocument2")
- doc2._data = {
- "first": {"stringValue": "Ada"},
- "last": {"stringValue": "lovelace"},
- }
-
- sort = query._comparator(doc1, doc2)
- self.assertEqual(sort, -1)
-
- def test_comparator_missing_order_by_field_in_data_raises(self):
- query = self._make_one(mock.sentinel.parent)
- orderByMock = mock.Mock()
- orderByMock.field.field_path = "last"
- orderByMock.direction = 1 # ascending
- query._orders = [orderByMock]
-
- doc1 = mock.Mock()
- doc1.reference._path = ("col", "adocument1")
- doc1._data = {}
- doc2 = mock.Mock()
- doc2.reference._path = ("col", "adocument2")
- doc2._data = {
- "first": {"stringValue": "Ada"},
- "last": {"stringValue": "lovelace"},
- }
-
- with self.assertRaisesRegex(ValueError, "Can only compare fields "):
- query._comparator(doc1, doc2)
-
-
-class Test__enum_from_op_string(unittest.TestCase):
- @staticmethod
- def _call_fut(op_string):
- from google.cloud.firestore_v1.query import _enum_from_op_string
-
- return _enum_from_op_string(op_string)
-
- @staticmethod
- def _get_op_class():
- from google.cloud.firestore_v1.gapic import enums
-
- return enums.StructuredQuery.FieldFilter.Operator
-
- def test_lt(self):
- op_class = self._get_op_class()
- self.assertEqual(self._call_fut("<"), op_class.LESS_THAN)
-
- def test_le(self):
- op_class = self._get_op_class()
- self.assertEqual(self._call_fut("<="), op_class.LESS_THAN_OR_EQUAL)
-
- def test_eq(self):
- op_class = self._get_op_class()
- self.assertEqual(self._call_fut("=="), op_class.EQUAL)
-
- def test_ge(self):
- op_class = self._get_op_class()
- self.assertEqual(self._call_fut(">="), op_class.GREATER_THAN_OR_EQUAL)
-
- def test_gt(self):
- op_class = self._get_op_class()
- self.assertEqual(self._call_fut(">"), op_class.GREATER_THAN)
-
- def test_array_contains(self):
- op_class = self._get_op_class()
- self.assertEqual(self._call_fut("array_contains"), op_class.ARRAY_CONTAINS)
-
- def test_in(self):
- op_class = self._get_op_class()
- self.assertEqual(self._call_fut("in"), op_class.IN)
-
- def test_array_contains_any(self):
- op_class = self._get_op_class()
- self.assertEqual(
- self._call_fut("array_contains_any"), op_class.ARRAY_CONTAINS_ANY
- )
-
- def test_invalid(self):
- with self.assertRaises(ValueError):
- self._call_fut("?")
-
-class Test__isnan(unittest.TestCase):
+class TestCollectionGroup(unittest.TestCase):
@staticmethod
- def _call_fut(value):
- from google.cloud.firestore_v1.query import _isnan
-
- return _isnan(value)
+ def _get_target_class():
+ from google.cloud.firestore_v1.query import CollectionGroup
- def test_valid(self):
- self.assertTrue(self._call_fut(float("nan")))
+ return CollectionGroup
- def test_invalid(self):
- self.assertFalse(self._call_fut(51.5))
- self.assertFalse(self._call_fut(None))
- self.assertFalse(self._call_fut("str"))
- self.assertFalse(self._call_fut(int))
- self.assertFalse(self._call_fut(1.0 + 1.0j))
+ def _make_one(self, *args, **kwargs):
+ klass = self._get_target_class()
+ return klass(*args, **kwargs)
+ def test_constructor(self):
+ query = self._make_one(mock.sentinel.parent)
+ self.assertIs(query._parent, mock.sentinel.parent)
+ self.assertIsNone(query._projection)
+ self.assertEqual(query._field_filters, ())
+ self.assertEqual(query._orders, ())
+ self.assertIsNone(query._limit)
+ self.assertIsNone(query._offset)
+ self.assertIsNone(query._start_at)
+ self.assertIsNone(query._end_at)
+ self.assertTrue(query._all_descendants)
-class Test__enum_from_direction(unittest.TestCase):
- @staticmethod
- def _call_fut(direction):
- from google.cloud.firestore_v1.query import _enum_from_direction
+ def test_constructor_all_descendents_is_false(self):
+ with pytest.raises(ValueError):
+ self._make_one(mock.sentinel.parent, all_descendants=False)
- return _enum_from_direction(direction)
+ def _get_partitions_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
- def test_success(self):
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.query import Query
+ # Create a minimal fake GAPIC.
+ firestore_api = mock.Mock(spec=["partition_query"])
- dir_class = enums.StructuredQuery.Direction
- self.assertEqual(self._call_fut(Query.ASCENDING), dir_class.ASCENDING)
- self.assertEqual(self._call_fut(Query.DESCENDING), dir_class.DESCENDING)
+ # Attach the fake GAPIC to a real client.
+ client = _make_client()
+ client._firestore_api_internal = firestore_api
- # Ints pass through
- self.assertEqual(self._call_fut(dir_class.ASCENDING), dir_class.ASCENDING)
- self.assertEqual(self._call_fut(dir_class.DESCENDING), dir_class.DESCENDING)
+ # Make a **real** collection reference as parent.
+ parent = client.collection("charles")
- def test_failure(self):
- with self.assertRaises(ValueError):
- self._call_fut("neither-ASCENDING-nor-DESCENDING")
+ # Make two **real** document references to use as cursors
+ document1 = parent.document("one")
+ document2 = parent.document("two")
+ # Add cursor pb's to the minimal fake GAPIC.
+ cursor_pb1 = _make_cursor_pb(([document1], False))
+ cursor_pb2 = _make_cursor_pb(([document2], False))
+ firestore_api.partition_query.return_value = iter([cursor_pb1, cursor_pb2])
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
-class Test__filter_pb(unittest.TestCase):
- @staticmethod
- def _call_fut(field_or_unary):
- from google.cloud.firestore_v1.query import _filter_pb
+ # Execute the query and check the response.
+ query = self._make_one(parent)
- return _filter_pb(field_or_unary)
+ get_response = query.get_partitions(2, **kwargs)
- def test_unary(self):
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import query_pb2
+ self.assertIsInstance(get_response, types.GeneratorType)
+ returned = list(get_response)
+ self.assertEqual(len(returned), 3)
- unary_pb = query_pb2.StructuredQuery.UnaryFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="a.b.c"),
- op=enums.StructuredQuery.UnaryFilter.Operator.IS_NULL,
- )
- filter_pb = self._call_fut(unary_pb)
- expected_pb = query_pb2.StructuredQuery.Filter(unary_filter=unary_pb)
- self.assertEqual(filter_pb, expected_pb)
-
- def test_field(self):
- from google.cloud.firestore_v1.gapic import enums
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import query_pb2
-
- field_filter_pb = query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path="XYZ"),
- op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN,
- value=document_pb2.Value(double_value=90.75),
+ # Verify the mock call.
+ parent_path, _ = parent._parent_info()
+ partition_query = self._make_one(
+ parent, orders=(query._make_order("__name__", query.ASCENDING),),
)
- filter_pb = self._call_fut(field_filter_pb)
- expected_pb = query_pb2.StructuredQuery.Filter(field_filter=field_filter_pb)
- self.assertEqual(filter_pb, expected_pb)
-
- def test_bad_type(self):
- with self.assertRaises(ValueError):
- self._call_fut(None)
-
-
-class Test__cursor_pb(unittest.TestCase):
- @staticmethod
- def _call_fut(cursor_pair):
- from google.cloud.firestore_v1.query import _cursor_pb
-
- return _cursor_pb(cursor_pair)
-
- def test_no_pair(self):
- self.assertIsNone(self._call_fut(None))
-
- def test_success(self):
- from google.cloud.firestore_v1.proto import query_pb2
- from google.cloud.firestore_v1 import _helpers
-
- data = [1.5, 10, True]
- cursor_pair = data, True
-
- cursor_pb = self._call_fut(cursor_pair)
-
- expected_pb = query_pb2.Cursor(
- values=[_helpers.encode_value(value) for value in data], before=True
+ firestore_api.partition_query.assert_called_once_with(
+ request={
+ "parent": parent_path,
+ "structured_query": partition_query._to_protobuf(),
+ "partition_count": 2,
+ },
+ metadata=client._rpc_metadata,
+ **kwargs,
)
- self.assertEqual(cursor_pb, expected_pb)
-
-class Test__query_response_to_snapshot(unittest.TestCase):
- @staticmethod
- def _call_fut(response_pb, collection, expected_prefix):
- from google.cloud.firestore_v1.query import _query_response_to_snapshot
-
- return _query_response_to_snapshot(response_pb, collection, expected_prefix)
-
- def test_empty(self):
- response_pb = _make_query_response()
- snapshot = self._call_fut(response_pb, None, None)
- self.assertIsNone(snapshot)
+ def test_get_partitions(self):
+ self._get_partitions_helper()
- def test_after_offset(self):
- skipped_results = 410
- response_pb = _make_query_response(skipped_results=skipped_results)
- snapshot = self._call_fut(response_pb, None, None)
- self.assertIsNone(snapshot)
+ def test_get_partitions_w_retry_timeout(self):
+ from google.api_core.retry import Retry
- def test_response(self):
- from google.cloud.firestore_v1.document import DocumentSnapshot
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._get_partitions_helper(retry=retry, timeout=timeout)
+ def test_get_partitions_w_filter(self):
+ # Make a **real** collection reference as parent.
client = _make_client()
- collection = client.collection("a", "b", "c")
- _, expected_prefix = collection._parent_info()
-
- # Create name for the protobuf.
- doc_id = "gigantic"
- name = "{}/{}".format(expected_prefix, doc_id)
- data = {"a": 901, "b": True}
- response_pb = _make_query_response(name=name, data=data)
-
- snapshot = self._call_fut(response_pb, collection, expected_prefix)
- self.assertIsInstance(snapshot, DocumentSnapshot)
- expected_path = collection._path + (doc_id,)
- self.assertEqual(snapshot.reference._path, expected_path)
- self.assertEqual(snapshot.to_dict(), data)
- self.assertTrue(snapshot.exists)
- self.assertEqual(snapshot.read_time, response_pb.read_time)
- self.assertEqual(snapshot.create_time, response_pb.document.create_time)
- self.assertEqual(snapshot.update_time, response_pb.document.update_time)
-
-
-class Test__collection_group_query_response_to_snapshot(unittest.TestCase):
- @staticmethod
- def _call_fut(response_pb, collection):
- from google.cloud.firestore_v1.query import (
- _collection_group_query_response_to_snapshot,
- )
-
- return _collection_group_query_response_to_snapshot(response_pb, collection)
+ parent = client.collection("charles")
- def test_empty(self):
- response_pb = _make_query_response()
- snapshot = self._call_fut(response_pb, None)
- self.assertIsNone(snapshot)
+ # Make a query that fails to partition
+ query = self._make_one(parent).where("foo", "==", "bar")
+ with pytest.raises(ValueError):
+ list(query.get_partitions(2))
- def test_after_offset(self):
- skipped_results = 410
- response_pb = _make_query_response(skipped_results=skipped_results)
- snapshot = self._call_fut(response_pb, None)
- self.assertIsNone(snapshot)
+ def test_get_partitions_w_projection(self):
+ # Make a **real** collection reference as parent.
+ client = _make_client()
+ parent = client.collection("charles")
- def test_response(self):
- from google.cloud.firestore_v1.document import DocumentSnapshot
+ # Make a query that fails to partition
+ query = self._make_one(parent).select("foo")
+ with pytest.raises(ValueError):
+ list(query.get_partitions(2))
+ def test_get_partitions_w_limit(self):
+ # Make a **real** collection reference as parent.
client = _make_client()
- collection = client.collection("a", "b", "c")
- other_collection = client.collection("a", "b", "d")
- to_match = other_collection.document("gigantic")
- data = {"a": 901, "b": True}
- response_pb = _make_query_response(name=to_match._document_path, data=data)
-
- snapshot = self._call_fut(response_pb, collection)
- self.assertIsInstance(snapshot, DocumentSnapshot)
- self.assertEqual(snapshot.reference._document_path, to_match._document_path)
- self.assertEqual(snapshot.to_dict(), data)
- self.assertTrue(snapshot.exists)
- self.assertEqual(snapshot.read_time, response_pb.read_time)
- self.assertEqual(snapshot.create_time, response_pb.document.create_time)
- self.assertEqual(snapshot.update_time, response_pb.document.update_time)
+ parent = client.collection("charles")
+ # Make a query that fails to partition
+ query = self._make_one(parent).limit(10)
+ with pytest.raises(ValueError):
+ list(query.get_partitions(2))
-def _make_credentials():
- import google.auth.credentials
+ def test_get_partitions_w_offset(self):
+ # Make a **real** collection reference as parent.
+ client = _make_client()
+ parent = client.collection("charles")
- return mock.Mock(spec=google.auth.credentials.Credentials)
+ # Make a query that fails to partition
+ query = self._make_one(parent).offset(10)
+ with pytest.raises(ValueError):
+ list(query.get_partitions(2))
def _make_client(project="project-project"):
@@ -1707,40 +591,3 @@ def _make_client(project="project-project"):
credentials = _make_credentials()
return Client(project=project, credentials=credentials)
-
-
-def _make_order_pb(field_path, direction):
- from google.cloud.firestore_v1.proto import query_pb2
-
- return query_pb2.StructuredQuery.Order(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- direction=direction,
- )
-
-
-def _make_query_response(**kwargs):
- # kwargs supported are ``skipped_results``, ``name`` and ``data``
- from google.cloud.firestore_v1.proto import document_pb2
- from google.cloud.firestore_v1.proto import firestore_pb2
- from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.firestore_v1 import _helpers
-
- now = datetime.datetime.utcnow()
- read_time = _datetime_to_pb_timestamp(now)
- kwargs["read_time"] = read_time
-
- name = kwargs.pop("name", None)
- data = kwargs.pop("data", None)
- if name is not None and data is not None:
- document_pb = document_pb2.Document(
- name=name, fields=_helpers.encode_dict(data)
- )
- delta = datetime.timedelta(seconds=100)
- update_time = _datetime_to_pb_timestamp(now - delta)
- create_time = _datetime_to_pb_timestamp(now - 2 * delta)
- document_pb.update_time.CopyFrom(update_time)
- document_pb.create_time.CopyFrom(create_time)
-
- kwargs["document"] = document_pb
-
- return firestore_pb2.RunQueryResponse(**kwargs)
diff --git a/tests/unit/v1/test_transaction.py b/tests/unit/v1/test_transaction.py
index da3c2d0b02..3a093a335d 100644
--- a/tests/unit/v1/test_transaction.py
+++ b/tests/unit/v1/test_transaction.py
@@ -48,7 +48,7 @@ def test_constructor_explicit(self):
self.assertIsNone(transaction._id)
def test__add_write_pbs_failure(self):
- from google.cloud.firestore_v1.transaction import _WRITE_READ_ONLY
+ from google.cloud.firestore_v1.base_transaction import _WRITE_READ_ONLY
batch = self._make_one(mock.sentinel.client, read_only=True)
self.assertEqual(batch._write_pbs, [])
@@ -64,66 +64,29 @@ def test__add_write_pbs(self):
batch._add_write_pbs([mock.sentinel.write])
self.assertEqual(batch._write_pbs, [mock.sentinel.write])
- def test__options_protobuf_read_only(self):
- from google.cloud.firestore_v1.proto import common_pb2
-
- transaction = self._make_one(mock.sentinel.client, read_only=True)
- options_pb = transaction._options_protobuf(None)
- expected_pb = common_pb2.TransactionOptions(
- read_only=common_pb2.TransactionOptions.ReadOnly()
- )
- self.assertEqual(options_pb, expected_pb)
-
- def test__options_protobuf_read_only_retry(self):
- from google.cloud.firestore_v1.transaction import _CANT_RETRY_READ_ONLY
-
- transaction = self._make_one(mock.sentinel.client, read_only=True)
- retry_id = b"illuminate"
-
- with self.assertRaises(ValueError) as exc_info:
- transaction._options_protobuf(retry_id)
-
- self.assertEqual(exc_info.exception.args, (_CANT_RETRY_READ_ONLY,))
-
- def test__options_protobuf_read_write(self):
- transaction = self._make_one(mock.sentinel.client)
- options_pb = transaction._options_protobuf(None)
- self.assertIsNone(options_pb)
-
- def test__options_protobuf_on_retry(self):
- from google.cloud.firestore_v1.proto import common_pb2
-
+ def test__clean_up(self):
transaction = self._make_one(mock.sentinel.client)
- retry_id = b"hocus-pocus"
- options_pb = transaction._options_protobuf(retry_id)
- expected_pb = common_pb2.TransactionOptions(
- read_write=common_pb2.TransactionOptions.ReadWrite(
- retry_transaction=retry_id
- )
- )
- self.assertEqual(options_pb, expected_pb)
+ transaction._write_pbs.extend([mock.sentinel.write_pb1, mock.sentinel.write])
+ transaction._id = b"not-this-time-my-friend"
- def test_in_progress_property(self):
- transaction = self._make_one(mock.sentinel.client)
- self.assertFalse(transaction.in_progress)
- transaction._id = b"not-none-bites"
- self.assertTrue(transaction.in_progress)
+ ret_val = transaction._clean_up()
+ self.assertIsNone(ret_val)
- def test_id_property(self):
- transaction = self._make_one(mock.sentinel.client)
- transaction._id = mock.sentinel.eye_dee
- self.assertIs(transaction.id, mock.sentinel.eye_dee)
+ self.assertEqual(transaction._write_pbs, [])
+ self.assertIsNone(transaction._id)
def test__begin(self):
- from google.cloud.firestore_v1.gapic import firestore_client
- from google.cloud.firestore_v1.proto import firestore_pb2
+ from google.cloud.firestore_v1.services.firestore import (
+ client as firestore_client,
+ )
+ from google.cloud.firestore_v1.types import firestore
# Create a minimal fake GAPIC with a dummy result.
firestore_api = mock.create_autospec(
firestore_client.FirestoreClient, instance=True
)
txn_id = b"to-begin"
- response = firestore_pb2.BeginTransactionResponse(transaction=txn_id)
+ response = firestore.BeginTransactionResponse(transaction=txn_id)
firestore_api.begin_transaction.return_value = response
# Attach the fake GAPIC to a real client.
@@ -140,11 +103,12 @@ def test__begin(self):
# Verify the called mock.
firestore_api.begin_transaction.assert_called_once_with(
- client._database_string, options_=None, metadata=client._rpc_metadata
+ request={"database": client._database_string, "options": None},
+ metadata=client._rpc_metadata,
)
def test__begin_failure(self):
- from google.cloud.firestore_v1.transaction import _CANT_BEGIN
+ from google.cloud.firestore_v1.base_transaction import _CANT_BEGIN
client = _make_client()
transaction = self._make_one(client)
@@ -156,22 +120,11 @@ def test__begin_failure(self):
err_msg = _CANT_BEGIN.format(transaction._id)
self.assertEqual(exc_info.exception.args, (err_msg,))
- def test__clean_up(self):
- transaction = self._make_one(mock.sentinel.client)
- transaction._write_pbs.extend(
- [mock.sentinel.write_pb1, mock.sentinel.write_pb2]
- )
- transaction._id = b"not-this-time-my-friend"
-
- ret_val = transaction._clean_up()
- self.assertIsNone(ret_val)
-
- self.assertEqual(transaction._write_pbs, [])
- self.assertIsNone(transaction._id)
-
def test__rollback(self):
from google.protobuf import empty_pb2
- from google.cloud.firestore_v1.gapic import firestore_client
+ from google.cloud.firestore_v1.services.firestore import (
+ client as firestore_client,
+ )
# Create a minimal fake GAPIC with a dummy result.
firestore_api = mock.create_autospec(
@@ -193,11 +146,12 @@ def test__rollback(self):
# Verify the called mock.
firestore_api.rollback.assert_called_once_with(
- client._database_string, txn_id, metadata=client._rpc_metadata
+ request={"database": client._database_string, "transaction": txn_id},
+ metadata=client._rpc_metadata,
)
def test__rollback_not_allowed(self):
- from google.cloud.firestore_v1.transaction import _CANT_ROLLBACK
+ from google.cloud.firestore_v1.base_transaction import _CANT_ROLLBACK
client = _make_client()
transaction = self._make_one(client)
@@ -210,7 +164,9 @@ def test__rollback_not_allowed(self):
def test__rollback_failure(self):
from google.api_core import exceptions
- from google.cloud.firestore_v1.gapic import firestore_client
+ from google.cloud.firestore_v1.services.firestore import (
+ client as firestore_client,
+ )
# Create a minimal fake GAPIC with a dummy failure.
firestore_api = mock.create_autospec(
@@ -237,21 +193,22 @@ def test__rollback_failure(self):
# Verify the called mock.
firestore_api.rollback.assert_called_once_with(
- client._database_string, txn_id, metadata=client._rpc_metadata
+ request={"database": client._database_string, "transaction": txn_id},
+ metadata=client._rpc_metadata,
)
def test__commit(self):
- from google.cloud.firestore_v1.gapic import firestore_client
- from google.cloud.firestore_v1.proto import firestore_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.services.firestore import (
+ client as firestore_client,
+ )
+ from google.cloud.firestore_v1.types import firestore
+ from google.cloud.firestore_v1.types import write
# Create a minimal fake GAPIC with a dummy result.
firestore_api = mock.create_autospec(
firestore_client.FirestoreClient, instance=True
)
- commit_response = firestore_pb2.CommitResponse(
- write_results=[write_pb2.WriteResult()]
- )
+ commit_response = firestore.CommitResponse(write_results=[write.WriteResult()])
firestore_api.commit.return_value = commit_response
# Attach the fake GAPIC to a real client.
@@ -274,14 +231,16 @@ def test__commit(self):
# Verify the mocks.
firestore_api.commit.assert_called_once_with(
- client._database_string,
- write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
def test__commit_not_allowed(self):
- from google.cloud.firestore_v1.transaction import _CANT_COMMIT
+ from google.cloud.firestore_v1.base_transaction import _CANT_COMMIT
transaction = self._make_one(mock.sentinel.client)
self.assertIsNone(transaction._id)
@@ -292,7 +251,9 @@ def test__commit_not_allowed(self):
def test__commit_failure(self):
from google.api_core import exceptions
- from google.cloud.firestore_v1.gapic import firestore_client
+ from google.cloud.firestore_v1.services.firestore import (
+ client as firestore_client,
+ )
# Create a minimal fake GAPIC with a dummy failure.
firestore_api = mock.create_autospec(
@@ -322,40 +283,87 @@ def test__commit_failure(self):
# Verify the called mock.
firestore_api.commit.assert_called_once_with(
- client._database_string,
- write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
- def test_get_all(self):
+ def _get_all_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
+
client = mock.Mock(spec=["get_all"])
transaction = self._make_one(client)
ref1, ref2 = mock.Mock(), mock.Mock()
- result = transaction.get_all([ref1, ref2])
- client.get_all.assert_called_once_with([ref1, ref2], transaction=transaction)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ result = transaction.get_all([ref1, ref2], **kwargs)
+
+ client.get_all.assert_called_once_with(
+ [ref1, ref2], transaction=transaction, **kwargs,
+ )
self.assertIs(result, client.get_all.return_value)
- def test_get_document_ref(self):
+ def test_get_all(self):
+ self._get_all_helper()
+
+ def test_get_all_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._get_all_helper(retry=retry, timeout=timeout)
+
+ def _get_w_document_ref_helper(self, retry=None, timeout=None):
from google.cloud.firestore_v1.document import DocumentReference
+ from google.cloud.firestore_v1 import _helpers
client = mock.Mock(spec=["get_all"])
transaction = self._make_one(client)
ref = DocumentReference("documents", "doc-id")
- result = transaction.get(ref)
- client.get_all.assert_called_once_with([ref], transaction=transaction)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ result = transaction.get(ref, **kwargs)
+
self.assertIs(result, client.get_all.return_value)
+ client.get_all.assert_called_once_with([ref], transaction=transaction, **kwargs)
- def test_get_w_query(self):
+ def test_get_w_document_ref(self):
+ self._get_w_document_ref_helper()
+
+ def test_get_w_document_ref_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._get_w_document_ref_helper(retry=retry, timeout=timeout)
+
+ def _get_w_query_helper(self, retry=None, timeout=None):
+ from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1.query import Query
client = mock.Mock(spec=[])
transaction = self._make_one(client)
query = Query(parent=mock.Mock(spec=[]))
query.stream = mock.MagicMock()
- result = transaction.get(query)
- query.stream.assert_called_once_with(transaction=transaction)
+ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+
+ result = transaction.get(query, **kwargs)
+
self.assertIs(result, query.stream.return_value)
+ query.stream.assert_called_once_with(transaction=transaction, **kwargs)
+
+ def test_get_w_query(self):
+ self._get_w_query_helper()
+
+ def test_get_w_query_w_retry_timeout(self):
+ from google.api_core.retry import Retry
+
+ retry = Retry(predicate=object())
+ timeout = 123.0
+ self._get_w_query_helper(retry=retry, timeout=timeout)
def test_get_failure(self):
client = _make_client()
@@ -382,17 +390,6 @@ def test_constructor(self):
self.assertIsNone(wrapped.current_id)
self.assertIsNone(wrapped.retry_id)
- def test__reset(self):
- wrapped = self._make_one(mock.sentinel.callable_)
- wrapped.current_id = b"not-none"
- wrapped.retry_id = b"also-not"
-
- ret_val = wrapped._reset()
- self.assertIsNone(ret_val)
-
- self.assertIsNone(wrapped.current_id)
- self.assertIsNone(wrapped.retry_id)
-
def test__pre_commit_success(self):
to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[])
wrapped = self._make_one(to_wrap)
@@ -410,15 +407,17 @@ def test__pre_commit_success(self):
to_wrap.assert_called_once_with(transaction, "pos", key="word")
firestore_api = transaction._client._firestore_api
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=None,
+ request={
+ "database": transaction._client._database_string,
+ "options": None,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_not_called()
def test__pre_commit_retry_id_already_set_success(self):
- from google.cloud.firestore_v1.proto import common_pb2
+ from google.cloud.firestore_v1.types import common
to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[])
wrapped = self._make_one(to_wrap)
@@ -437,14 +436,14 @@ def test__pre_commit_retry_id_already_set_success(self):
# Verify mocks.
to_wrap.assert_called_once_with(transaction)
firestore_api = transaction._client._firestore_api
- options_ = common_pb2.TransactionOptions(
- read_write=common_pb2.TransactionOptions.ReadWrite(
- retry_transaction=txn_id1
- )
+ options_ = common.TransactionOptions(
+ read_write=common.TransactionOptions.ReadWrite(retry_transaction=txn_id1)
)
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=options_,
+ request={
+ "database": transaction._client._database_string,
+ "options": options_,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_not_called()
@@ -469,13 +468,17 @@ def test__pre_commit_failure(self):
to_wrap.assert_called_once_with(transaction, 10, 20)
firestore_api = transaction._client._firestore_api
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=None,
+ request={
+ "database": transaction._client._database_string,
+ "options": None,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_called_once_with(
- transaction._client._database_string,
- txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.commit.assert_not_called()
@@ -506,13 +509,17 @@ def test__pre_commit_failure_with_rollback_failure(self):
# Verify mocks.
to_wrap.assert_called_once_with(transaction, a="b", c="zebra")
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=None,
+ request={
+ "database": transaction._client._database_string,
+ "options": None,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_called_once_with(
- transaction._client._database_string,
- txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.commit.assert_not_called()
@@ -534,9 +541,11 @@ def test__maybe_commit_success(self):
firestore_api.begin_transaction.assert_not_called()
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
@@ -569,9 +578,11 @@ def test__maybe_commit_failure_read_only(self):
firestore_api.begin_transaction.assert_not_called()
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
@@ -602,9 +613,11 @@ def test__maybe_commit_failure_can_retry(self):
firestore_api.begin_transaction.assert_not_called()
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
@@ -636,9 +649,11 @@ def test__maybe_commit_failure_cannot_retry(self):
firestore_api.begin_transaction.assert_not_called()
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
@@ -659,23 +674,24 @@ def test___call__success_first_attempt(self):
to_wrap.assert_called_once_with(transaction, "a", b="c")
firestore_api = transaction._client._firestore_api
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=None,
+ request={"database": transaction._client._database_string, "options": None},
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_not_called()
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
def test___call__success_second_attempt(self):
from google.api_core import exceptions
- from google.cloud.firestore_v1.proto import common_pb2
- from google.cloud.firestore_v1.proto import firestore_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.types import common
+ from google.cloud.firestore_v1.types import firestore
+ from google.cloud.firestore_v1.types import write
to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[])
wrapped = self._make_one(to_wrap)
@@ -688,7 +704,7 @@ def test___call__success_second_attempt(self):
firestore_api = transaction._client._firestore_api
firestore_api.commit.side_effect = [
exc,
- firestore_pb2.CommitResponse(write_results=[write_pb2.WriteResult()]),
+ firestore.CommitResponse(write_results=[write.WriteResult()]),
]
# Call the __call__-able ``wrapped``.
@@ -704,31 +720,32 @@ def test___call__success_second_attempt(self):
self.assertEqual(to_wrap.mock_calls, [wrapped_call, wrapped_call])
firestore_api = transaction._client._firestore_api
db_str = transaction._client._database_string
- options_ = common_pb2.TransactionOptions(
- read_write=common_pb2.TransactionOptions.ReadWrite(retry_transaction=txn_id)
+ options_ = common.TransactionOptions(
+ read_write=common.TransactionOptions.ReadWrite(retry_transaction=txn_id)
)
self.assertEqual(
firestore_api.begin_transaction.mock_calls,
[
mock.call(
- db_str, options_=None, metadata=transaction._client._rpc_metadata
+ request={"database": db_str, "options": None},
+ metadata=transaction._client._rpc_metadata,
),
mock.call(
- db_str,
- options_=options_,
+ request={"database": db_str, "options": options_},
metadata=transaction._client._rpc_metadata,
),
],
)
firestore_api.rollback.assert_not_called()
commit_call = mock.call(
- db_str, [], transaction=txn_id, metadata=transaction._client._rpc_metadata
+ request={"database": db_str, "writes": [], "transaction": txn_id},
+ metadata=transaction._client._rpc_metadata,
)
self.assertEqual(firestore_api.commit.mock_calls, [commit_call, commit_call])
def test___call__failure(self):
from google.api_core import exceptions
- from google.cloud.firestore_v1.transaction import _EXCEED_ATTEMPTS_TEMPLATE
+ from google.cloud.firestore_v1.base_transaction import _EXCEED_ATTEMPTS_TEMPLATE
to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[])
wrapped = self._make_one(to_wrap)
@@ -755,19 +772,25 @@ def test___call__failure(self):
# Verify mocks.
to_wrap.assert_called_once_with(transaction, "here", there=1.5)
firestore_api.begin_transaction.assert_called_once_with(
- transaction._client._database_string,
- options_=None,
+ request={
+ "database": transaction._client._database_string,
+ "options": None,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.rollback.assert_called_once_with(
- transaction._client._database_string,
- txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
firestore_api.commit.assert_called_once_with(
- transaction._client._database_string,
- [],
- transaction=txn_id,
+ request={
+ "database": transaction._client._database_string,
+ "writes": [],
+ "transaction": txn_id,
+ },
metadata=transaction._client._rpc_metadata,
)
@@ -796,7 +819,9 @@ def _call_fut(client, write_pbs, transaction_id):
@mock.patch("google.cloud.firestore_v1.transaction._sleep")
def test_success_first_attempt(self, _sleep):
- from google.cloud.firestore_v1.gapic import firestore_client
+ from google.cloud.firestore_v1.services.firestore import (
+ client as firestore_client,
+ )
# Create a minimal fake GAPIC with a dummy result.
firestore_api = mock.create_autospec(
@@ -815,16 +840,20 @@ def test_success_first_attempt(self, _sleep):
# Verify mocks used.
_sleep.assert_not_called()
firestore_api.commit.assert_called_once_with(
- client._database_string,
- mock.sentinel.write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": mock.sentinel.write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
@mock.patch("google.cloud.firestore_v1.transaction._sleep", side_effect=[2.0, 4.0])
def test_success_third_attempt(self, _sleep):
from google.api_core import exceptions
- from google.cloud.firestore_v1.gapic import firestore_client
+ from google.cloud.firestore_v1.services.firestore import (
+ client as firestore_client,
+ )
# Create a minimal fake GAPIC with a dummy result.
firestore_api = mock.create_autospec(
@@ -847,14 +876,17 @@ def test_success_third_attempt(self, _sleep):
self.assertIs(commit_response, mock.sentinel.commit_response)
# Verify mocks used.
+ # Ensure _sleep is called after commit failures, with intervals of 1 and 2 seconds
self.assertEqual(_sleep.call_count, 2)
_sleep.assert_any_call(1.0)
_sleep.assert_any_call(2.0)
# commit() called same way 3 times.
commit_call = mock.call(
- client._database_string,
- mock.sentinel.write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": mock.sentinel.write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
self.assertEqual(
@@ -864,7 +896,9 @@ def test_success_third_attempt(self, _sleep):
@mock.patch("google.cloud.firestore_v1.transaction._sleep")
def test_failure_first_attempt(self, _sleep):
from google.api_core import exceptions
- from google.cloud.firestore_v1.gapic import firestore_client
+ from google.cloud.firestore_v1.services.firestore import (
+ client as firestore_client,
+ )
# Create a minimal fake GAPIC with a dummy result.
firestore_api = mock.create_autospec(
@@ -888,16 +922,20 @@ def test_failure_first_attempt(self, _sleep):
# Verify mocks used.
_sleep.assert_not_called()
firestore_api.commit.assert_called_once_with(
- client._database_string,
- mock.sentinel.write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": mock.sentinel.write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
@mock.patch("google.cloud.firestore_v1.transaction._sleep", return_value=2.0)
def test_failure_second_attempt(self, _sleep):
from google.api_core import exceptions
- from google.cloud.firestore_v1.gapic import firestore_client
+ from google.cloud.firestore_v1.services.firestore import (
+ client as firestore_client,
+ )
# Create a minimal fake GAPIC with a dummy result.
firestore_api = mock.create_autospec(
@@ -924,9 +962,11 @@ def test_failure_second_attempt(self, _sleep):
_sleep.assert_called_once_with(1.0)
# commit() called same way 2 times.
commit_call = mock.call(
- client._database_string,
- mock.sentinel.write_pbs,
- transaction=txn_id,
+ request={
+ "database": client._database_string,
+ "writes": mock.sentinel.write_pbs,
+ "transaction": txn_id,
+ },
metadata=client._rpc_metadata,
)
self.assertEqual(firestore_api.commit.mock_calls, [commit_call, commit_call])
@@ -993,9 +1033,9 @@ def _make_client(project="feral-tom-cat"):
def _make_transaction(txn_id, **txn_kwargs):
from google.protobuf import empty_pb2
- from google.cloud.firestore_v1.gapic import firestore_client
- from google.cloud.firestore_v1.proto import firestore_pb2
- from google.cloud.firestore_v1.proto import write_pb2
+ from google.cloud.firestore_v1.services.firestore import client as firestore_client
+ from google.cloud.firestore_v1.types import firestore
+ from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.transaction import Transaction
# Create a fake GAPIC ...
@@ -1003,14 +1043,12 @@ def _make_transaction(txn_id, **txn_kwargs):
firestore_client.FirestoreClient, instance=True
)
# ... with a dummy ``BeginTransactionResponse`` result ...
- begin_response = firestore_pb2.BeginTransactionResponse(transaction=txn_id)
+ begin_response = firestore.BeginTransactionResponse(transaction=txn_id)
firestore_api.begin_transaction.return_value = begin_response
# ... and a dummy ``Rollback`` result ...
firestore_api.rollback.return_value = empty_pb2.Empty()
# ... and a dummy ``Commit`` result.
- commit_response = firestore_pb2.CommitResponse(
- write_results=[write_pb2.WriteResult()]
- )
+ commit_response = firestore.CommitResponse(write_results=[write.WriteResult()])
firestore_api.commit.return_value = commit_response
# Attach the fake GAPIC to a real client.
diff --git a/tests/unit/v1/test_watch.py b/tests/unit/v1/test_watch.py
index 0778717bcc..759549b72a 100644
--- a/tests/unit/v1/test_watch.py
+++ b/tests/unit/v1/test_watch.py
@@ -1,7 +1,21 @@
+# Copyright 2020 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
import datetime
import unittest
import mock
-from google.cloud.firestore_v1.proto import firestore_pb2
+from google.cloud.firestore_v1.types import firestore
class TestWatchDocTree(unittest.TestCase):
@@ -199,17 +213,17 @@ def _snapshot_callback(self, docs, changes, read_time):
self.snapshotted = (docs, changes, read_time)
def test_ctor(self):
- from google.cloud.firestore_v1.proto import firestore_pb2
+ from google.cloud.firestore_v1.types import firestore
from google.cloud.firestore_v1.watch import _should_recover
from google.cloud.firestore_v1.watch import _should_terminate
inst = self._makeOne()
self.assertTrue(inst._consumer.started)
self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done])
- self.assertIs(inst._rpc.start_rpc, inst._api.transport.listen)
+ self.assertIs(inst._rpc.start_rpc, inst._api._transport.listen)
self.assertIs(inst._rpc.should_recover, _should_recover)
self.assertIs(inst._rpc.should_terminate, _should_terminate)
- self.assertIsInstance(inst._rpc.initial_request, firestore_pb2.ListenRequest)
+ self.assertIsInstance(inst._rpc.initial_request, firestore.ListenRequest)
self.assertEqual(inst._rpc.metadata, DummyFirestore._rpc_metadata)
def test__on_rpc_done(self):
@@ -278,7 +292,7 @@ def test_for_query(self):
parent = DummyCollection(client)
modulename = "google.cloud.firestore_v1.watch"
pb2 = DummyPb2()
- with mock.patch("%s.firestore_pb2" % modulename, pb2):
+ with mock.patch("%s.firestore" % modulename, pb2):
with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc):
with mock.patch(
"%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer
@@ -306,7 +320,7 @@ def test_for_query_nested(self):
parent = DummyCollection(client, parent=grandparent)
modulename = "google.cloud.firestore_v1.watch"
pb2 = DummyPb2()
- with mock.patch("%s.firestore_pb2" % modulename, pb2):
+ with mock.patch("%s.firestore" % modulename, pb2):
with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc):
with mock.patch(
"%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer
@@ -352,7 +366,9 @@ def push(read_time, next_resume_token):
def test_on_snapshot_target_add(self):
inst = self._makeOne()
proto = DummyProto()
- proto.target_change.target_change_type = firestore_pb2.TargetChange.ADD
+ proto.target_change.target_change_type = (
+ firestore.TargetChange.TargetChangeType.ADD
+ )
proto.target_change.target_ids = [1] # not "Py"
with self.assertRaises(Exception) as exc:
inst.on_snapshot(proto)
@@ -362,7 +378,9 @@ def test_on_snapshot_target_remove(self):
inst = self._makeOne()
proto = DummyProto()
target_change = proto.target_change
- target_change.target_change_type = firestore_pb2.TargetChange.REMOVE
+ target_change.target_change_type = (
+ firestore.TargetChange.TargetChangeType.REMOVE
+ )
with self.assertRaises(Exception) as exc:
inst.on_snapshot(proto)
self.assertEqual(str(exc.exception), "Error 1: hi")
@@ -372,7 +390,9 @@ def test_on_snapshot_target_remove_nocause(self):
proto = DummyProto()
target_change = proto.target_change
target_change.cause = None
- target_change.target_change_type = firestore_pb2.TargetChange.REMOVE
+ target_change.target_change_type = (
+ firestore.TargetChange.TargetChangeType.REMOVE
+ )
with self.assertRaises(Exception) as exc:
inst.on_snapshot(proto)
self.assertEqual(str(exc.exception), "Error 13: internal error")
@@ -386,7 +406,7 @@ def reset():
inst._reset_docs = reset
proto = DummyProto()
target_change = proto.target_change
- target_change.target_change_type = firestore_pb2.TargetChange.RESET
+ target_change.target_change_type = firestore.TargetChange.TargetChangeType.RESET
inst.on_snapshot(proto)
self.assertTrue(inst._docs_reset)
@@ -395,7 +415,9 @@ def test_on_snapshot_target_current(self):
inst.current = False
proto = DummyProto()
target_change = proto.target_change
- target_change.target_change_type = firestore_pb2.TargetChange.CURRENT
+ target_change.target_change_type = (
+ firestore.TargetChange.TargetChangeType.CURRENT
+ )
inst.on_snapshot(proto)
self.assertTrue(inst.current)
@@ -546,14 +568,12 @@ def test_on_snapshot_unknown_listen_type(self):
def test_push_callback_called_no_changes(self):
import pytz
- class DummyReadTime(object):
- seconds = 1534858278
+ dummy_time = (datetime.datetime.fromtimestamp(1534858278, pytz.utc),)
inst = self._makeOne()
- inst.push(DummyReadTime, "token")
+ inst.push(dummy_time, "token")
self.assertEqual(
- self.snapshotted,
- ([], [], datetime.datetime.fromtimestamp(DummyReadTime.seconds, pytz.utc)),
+ self.snapshotted, ([], [], dummy_time),
)
self.assertTrue(inst.has_pushed)
self.assertEqual(inst.resume_token, "token")
@@ -790,7 +810,7 @@ def Listen(self): # pragma: NO COVER
class DummyFirestoreClient(object):
def __init__(self):
- self.transport = mock.Mock(_stubs={"firestore_stub": DummyFirestoreStub()})
+ self._transport = mock.Mock(_stubs={"firestore_stub": DummyFirestoreStub()})
class DummyDocumentReference(object):
@@ -850,6 +870,9 @@ class DummyFirestore(object):
_database_string = "abc://bar/"
_rpc_metadata = None
+ def ListenRequest(self, **kw): # pragma: NO COVER
+ pass
+
def document(self, *document_path): # pragma: NO COVER
if len(document_path) == 1:
path = document_path[0].split("/")
@@ -950,7 +973,7 @@ def __init__(self):
self.target_ids = []
self.removed_target_ids = []
self.read_time = 0
- self.target_change_type = firestore_pb2.TargetChange.NO_CHANGE
+ self.target_change_type = firestore.TargetChange.TargetChangeType.NO_CHANGE
self.resume_token = None
self.cause = DummyCause()
@@ -964,6 +987,12 @@ def __init__(self):
class DummyTarget(object):
def QueryTarget(self, **kw):
self.kw = kw
+ return DummyQueryTarget()
+
+
+class DummyQueryTarget(object):
+ @property
+ def _pb(self):
return "dummy query target"
diff --git a/tests/unit/v1/testdata/create-all-transforms.json b/tests/unit/v1/testdata/create-all-transforms.json
index 82831624bb..6389599987 100644
--- a/tests/unit/v1/testdata/create-all-transforms.json
+++ b/tests/unit/v1/testdata/create-all-transforms.json
@@ -20,50 +20,45 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- },
- {
- "fieldPath": "c",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "d",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ },
+ {
+ "fieldPath": "c",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ },
+ {
+ "fieldPath": "d",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
+ }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-arrayremove-multi.json b/tests/unit/v1/testdata/create-arrayremove-multi.json
index 548a983808..331a53bf9c 100644
--- a/tests/unit/v1/testdata/create-arrayremove-multi.json
+++ b/tests/unit/v1/testdata/create-arrayremove-multi.json
@@ -20,46 +20,41 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "c.d",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ },
+ {
+ "fieldPath": "c.d",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
+ }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-arrayremove-nested.json b/tests/unit/v1/testdata/create-arrayremove-nested.json
index fa01bd7e00..00c73d05cc 100644
--- a/tests/unit/v1/testdata/create-arrayremove-nested.json
+++ b/tests/unit/v1/testdata/create-arrayremove-nested.json
@@ -20,30 +20,25 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-arrayremove.json b/tests/unit/v1/testdata/create-arrayremove.json
index a69be14b7b..646e259f6f 100644
--- a/tests/unit/v1/testdata/create-arrayremove.json
+++ b/tests/unit/v1/testdata/create-arrayremove.json
@@ -20,30 +20,25 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-arrayunion-multi.json b/tests/unit/v1/testdata/create-arrayunion-multi.json
index 7ca9852f48..5ba324f429 100644
--- a/tests/unit/v1/testdata/create-arrayunion-multi.json
+++ b/tests/unit/v1/testdata/create-arrayunion-multi.json
@@ -20,46 +20,41 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "c.d",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ },
+ {
+ "fieldPath": "c.d",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
+ }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-arrayunion-nested.json b/tests/unit/v1/testdata/create-arrayunion-nested.json
index a2f20299d3..2a21509004 100644
--- a/tests/unit/v1/testdata/create-arrayunion-nested.json
+++ b/tests/unit/v1/testdata/create-arrayunion-nested.json
@@ -20,30 +20,25 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-arrayunion.json b/tests/unit/v1/testdata/create-arrayunion.json
index 26d0799466..99a75feded 100644
--- a/tests/unit/v1/testdata/create-arrayunion.json
+++ b/tests/unit/v1/testdata/create-arrayunion.json
@@ -20,30 +20,25 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-st-alone.json b/tests/unit/v1/testdata/create-st-alone.json
index 20c5e8ec32..177293906b 100644
--- a/tests/unit/v1/testdata/create-st-alone.json
+++ b/tests/unit/v1/testdata/create-st-alone.json
@@ -10,18 +10,19 @@
"database": "projects/projectID/databases/(default)",
"writes": [
{
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- },
"currentDocument": {
"exists": false
- }
+ },
+ "update": {
+ "fields": {},
+ "name": "projects/projectID/databases/(default)/documents/C/d"
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-st-multi.json b/tests/unit/v1/testdata/create-st-multi.json
index 89430e2b64..41f3cd811c 100644
--- a/tests/unit/v1/testdata/create-st-multi.json
+++ b/tests/unit/v1/testdata/create-st-multi.json
@@ -20,22 +20,17 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- },
- {
- "fieldPath": "c.d",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ },
+ {
+ "fieldPath": "c.d",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-st-nested.json b/tests/unit/v1/testdata/create-st-nested.json
index f2a3a8d1f6..7316d916f4 100644
--- a/tests/unit/v1/testdata/create-st-nested.json
+++ b/tests/unit/v1/testdata/create-st-nested.json
@@ -20,18 +20,13 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-st-with-empty-map.json b/tests/unit/v1/testdata/create-st-with-empty-map.json
index 730afd154f..b638a0c9db 100644
--- a/tests/unit/v1/testdata/create-st-with-empty-map.json
+++ b/tests/unit/v1/testdata/create-st-with-empty-map.json
@@ -28,18 +28,13 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/create-st.json b/tests/unit/v1/testdata/create-st.json
index 705f76ed16..c4ad4be46b 100644
--- a/tests/unit/v1/testdata/create-st.json
+++ b/tests/unit/v1/testdata/create-st.json
@@ -20,18 +20,13 @@
},
"currentDocument": {
"exists": false
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/query-invalid-operator.json b/tests/unit/v1/testdata/query-invalid-operator.json
index 064164dc0d..c53e5c2bdf 100644
--- a/tests/unit/v1/testdata/query-invalid-operator.json
+++ b/tests/unit/v1/testdata/query-invalid-operator.json
@@ -2,7 +2,7 @@
"tests": [
{
"description": "query: invalid operator in Where clause",
- "comment": "The != operator is not supported.",
+ "comment": "The |~| operator is not supported.",
"query": {
"collPath": "projects/projectID/databases/(default)/documents/C",
"clauses": [
@@ -13,7 +13,7 @@
"a"
]
},
- "op": "!=",
+ "op": "|~|",
"jsonValue": "4"
}
}
diff --git a/tests/unit/v1/testdata/set-all-transforms.json b/tests/unit/v1/testdata/set-all-transforms.json
index 5c8b1373d4..a26b51b007 100644
--- a/tests/unit/v1/testdata/set-all-transforms.json
+++ b/tests/unit/v1/testdata/set-all-transforms.json
@@ -17,50 +17,45 @@
"integerValue": "1"
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- },
- {
- "fieldPath": "c",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "d",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ },
+ {
+ "fieldPath": "c",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
+ }
+ },
+ {
+ "fieldPath": "d",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-arrayremove-multi.json b/tests/unit/v1/testdata/set-arrayremove-multi.json
index 3ea9b0dbd8..dc2ace22f8 100644
--- a/tests/unit/v1/testdata/set-arrayremove-multi.json
+++ b/tests/unit/v1/testdata/set-arrayremove-multi.json
@@ -17,46 +17,41 @@
"integerValue": "1"
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "c.d",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
+ }
+ },
+ {
+ "fieldPath": "c.d",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-arrayremove-nested.json b/tests/unit/v1/testdata/set-arrayremove-nested.json
index 4db133f2c5..1e25b8f26b 100644
--- a/tests/unit/v1/testdata/set-arrayremove-nested.json
+++ b/tests/unit/v1/testdata/set-arrayremove-nested.json
@@ -17,30 +17,25 @@
"integerValue": "1"
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-arrayremove.json b/tests/unit/v1/testdata/set-arrayremove.json
index 18969ef80a..e0506b22be 100644
--- a/tests/unit/v1/testdata/set-arrayremove.json
+++ b/tests/unit/v1/testdata/set-arrayremove.json
@@ -17,30 +17,25 @@
"integerValue": "1"
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-arrayunion-multi.json b/tests/unit/v1/testdata/set-arrayunion-multi.json
index 3d076397c5..502d7dc7df 100644
--- a/tests/unit/v1/testdata/set-arrayunion-multi.json
+++ b/tests/unit/v1/testdata/set-arrayunion-multi.json
@@ -17,46 +17,41 @@
"integerValue": "1"
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "c.d",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
+ }
+ },
+ {
+ "fieldPath": "c.d",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-arrayunion-nested.json b/tests/unit/v1/testdata/set-arrayunion-nested.json
index e265f6c613..7084e6bcd9 100644
--- a/tests/unit/v1/testdata/set-arrayunion-nested.json
+++ b/tests/unit/v1/testdata/set-arrayunion-nested.json
@@ -17,30 +17,25 @@
"integerValue": "1"
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-arrayunion.json b/tests/unit/v1/testdata/set-arrayunion.json
index 856e075173..af12b33dd0 100644
--- a/tests/unit/v1/testdata/set-arrayunion.json
+++ b/tests/unit/v1/testdata/set-arrayunion.json
@@ -17,30 +17,25 @@
"integerValue": "1"
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st-alone-mergeall.json b/tests/unit/v1/testdata/set-st-alone-mergeall.json
index d95bf0973b..f6b60af810 100644
--- a/tests/unit/v1/testdata/set-st-alone-mergeall.json
+++ b/tests/unit/v1/testdata/set-st-alone-mergeall.json
@@ -13,15 +13,19 @@
"database": "projects/projectID/databases/(default)",
"writes": [
{
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ "update": {
+ "fields": {},
+ "name": "projects/projectID/databases/(default)/documents/C/d"
+ },
+ "updateMask": {
+ "fieldPaths": []
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st-alone.json b/tests/unit/v1/testdata/set-st-alone.json
index 3fe931394b..1d28fd6f18 100644
--- a/tests/unit/v1/testdata/set-st-alone.json
+++ b/tests/unit/v1/testdata/set-st-alone.json
@@ -13,18 +13,13 @@
"update": {
"name": "projects/projectID/databases/(default)/documents/C/d",
"fields": {}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st-merge-both.json b/tests/unit/v1/testdata/set-st-merge-both.json
index a39ada55f7..359c899a1e 100644
--- a/tests/unit/v1/testdata/set-st-merge-both.json
+++ b/tests/unit/v1/testdata/set-st-merge-both.json
@@ -36,18 +36,13 @@
"fieldPaths": [
"a"
]
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st-merge-nonleaf-alone.json b/tests/unit/v1/testdata/set-st-merge-nonleaf-alone.json
index 4193b00ea6..5af99ab0a5 100644
--- a/tests/unit/v1/testdata/set-st-merge-nonleaf-alone.json
+++ b/tests/unit/v1/testdata/set-st-merge-nonleaf-alone.json
@@ -26,18 +26,13 @@
"fieldPaths": [
"h"
]
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "h.g",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "h.g",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st-merge-nonleaf.json b/tests/unit/v1/testdata/set-st-merge-nonleaf.json
index 5e91d663b8..e66ca87bf8 100644
--- a/tests/unit/v1/testdata/set-st-merge-nonleaf.json
+++ b/tests/unit/v1/testdata/set-st-merge-nonleaf.json
@@ -37,18 +37,13 @@
"fieldPaths": [
"h"
]
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "h.g",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "h.g",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st-merge-nowrite.json b/tests/unit/v1/testdata/set-st-merge-nowrite.json
index 08fa8b52f5..44091b1276 100644
--- a/tests/unit/v1/testdata/set-st-merge-nowrite.json
+++ b/tests/unit/v1/testdata/set-st-merge-nowrite.json
@@ -19,15 +19,19 @@
"database": "projects/projectID/databases/(default)",
"writes": [
{
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ "update": {
+ "fields": {},
+ "name": "projects/projectID/databases/(default)/documents/C/d"
+ },
+ "updateMask": {
+ "fieldPaths": []
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st-mergeall.json b/tests/unit/v1/testdata/set-st-mergeall.json
index 26883c0382..f913d69e61 100644
--- a/tests/unit/v1/testdata/set-st-mergeall.json
+++ b/tests/unit/v1/testdata/set-st-mergeall.json
@@ -25,18 +25,13 @@
"fieldPaths": [
"a"
]
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st-multi.json b/tests/unit/v1/testdata/set-st-multi.json
index 23c06f4976..03200729ca 100644
--- a/tests/unit/v1/testdata/set-st-multi.json
+++ b/tests/unit/v1/testdata/set-st-multi.json
@@ -17,22 +17,17 @@
"integerValue": "1"
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- },
- {
- "fieldPath": "c.d",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ },
+ {
+ "fieldPath": "c.d",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st-nested.json b/tests/unit/v1/testdata/set-st-nested.json
index 5c94c33f94..58406e80b3 100644
--- a/tests/unit/v1/testdata/set-st-nested.json
+++ b/tests/unit/v1/testdata/set-st-nested.json
@@ -17,18 +17,13 @@
"integerValue": "1"
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st-with-empty-map.json b/tests/unit/v1/testdata/set-st-with-empty-map.json
index 063c94a0e6..a407866537 100644
--- a/tests/unit/v1/testdata/set-st-with-empty-map.json
+++ b/tests/unit/v1/testdata/set-st-with-empty-map.json
@@ -25,18 +25,13 @@
}
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/set-st.json b/tests/unit/v1/testdata/set-st.json
index 42f2b14f1c..3e55ae111b 100644
--- a/tests/unit/v1/testdata/set-st.json
+++ b/tests/unit/v1/testdata/set-st.json
@@ -17,18 +17,13 @@
"integerValue": "1"
}
}
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-all-transforms.json b/tests/unit/v1/testdata/update-all-transforms.json
index 6f6a725df0..72b16d3a1b 100644
--- a/tests/unit/v1/testdata/update-all-transforms.json
+++ b/tests/unit/v1/testdata/update-all-transforms.json
@@ -25,50 +25,45 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- },
- {
- "fieldPath": "c",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "d",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ },
+ {
+ "fieldPath": "c",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ },
+ {
+ "fieldPath": "d",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
+ }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-arrayremove-alone.json b/tests/unit/v1/testdata/update-arrayremove-alone.json
index 86fc8802e5..93b8ff0528 100644
--- a/tests/unit/v1/testdata/update-arrayremove-alone.json
+++ b/tests/unit/v1/testdata/update-arrayremove-alone.json
@@ -10,31 +10,35 @@
"database": "projects/projectID/databases/(default)",
"writes": [
{
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- }
- ]
- },
"currentDocument": {
"exists": true
- }
- }
+ },
+ "update": {
+ "fields": {},
+ "name": "projects/projectID/databases/(default)/documents/C/d"
+ },
+ "updateMask": {
+ "fieldPaths": []
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
+ }
+ }
+ ]
+ }
]
}
}
diff --git a/tests/unit/v1/testdata/update-arrayremove-multi.json b/tests/unit/v1/testdata/update-arrayremove-multi.json
index df880f6792..18ed0fddea 100644
--- a/tests/unit/v1/testdata/update-arrayremove-multi.json
+++ b/tests/unit/v1/testdata/update-arrayremove-multi.json
@@ -26,46 +26,41 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "c.d",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ },
+ {
+ "fieldPath": "c.d",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
+ }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-arrayremove-nested.json b/tests/unit/v1/testdata/update-arrayremove-nested.json
index 28d59aff66..7159797c77 100644
--- a/tests/unit/v1/testdata/update-arrayremove-nested.json
+++ b/tests/unit/v1/testdata/update-arrayremove-nested.json
@@ -26,30 +26,25 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-arrayremove.json b/tests/unit/v1/testdata/update-arrayremove.json
index d925704db6..2311f916de 100644
--- a/tests/unit/v1/testdata/update-arrayremove.json
+++ b/tests/unit/v1/testdata/update-arrayremove.json
@@ -25,30 +25,25 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-arrayunion-alone.json b/tests/unit/v1/testdata/update-arrayunion-alone.json
index 757ea48c3b..5cb08579cb 100644
--- a/tests/unit/v1/testdata/update-arrayunion-alone.json
+++ b/tests/unit/v1/testdata/update-arrayunion-alone.json
@@ -10,30 +10,34 @@
"database": "projects/projectID/databases/(default)",
"writes": [
{
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- }
- ]
- },
"currentDocument": {
"exists": true
- }
+ },
+ "update": {
+ "fields": {},
+ "name": "projects/projectID/databases/(default)/documents/C/d"
+ },
+ "updateMask": {
+ "fieldPaths": []
+ },
+ "updateTransforms": [
+ {
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
+ },
+ "fieldPath": "a"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-arrayunion-multi.json b/tests/unit/v1/testdata/update-arrayunion-multi.json
index 3aafcd0f35..674ce2b4c2 100644
--- a/tests/unit/v1/testdata/update-arrayunion-multi.json
+++ b/tests/unit/v1/testdata/update-arrayunion-multi.json
@@ -26,46 +26,41 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "c.d",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ },
+ {
+ "fieldPath": "c.d",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
+ }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-arrayunion-nested.json b/tests/unit/v1/testdata/update-arrayunion-nested.json
index f2bf3770dc..841ceed0ac 100644
--- a/tests/unit/v1/testdata/update-arrayunion-nested.json
+++ b/tests/unit/v1/testdata/update-arrayunion-nested.json
@@ -26,30 +26,25 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-arrayunion.json b/tests/unit/v1/testdata/update-arrayunion.json
index 60192c9f8c..0aca2356c1 100644
--- a/tests/unit/v1/testdata/update-arrayunion.json
+++ b/tests/unit/v1/testdata/update-arrayunion.json
@@ -25,30 +25,25 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-nested-transform-and-nested-value.json b/tests/unit/v1/testdata/update-nested-transform-and-nested-value.json
index ff7bfc6ee9..2ccba0985a 100644
--- a/tests/unit/v1/testdata/update-nested-transform-and-nested-value.json
+++ b/tests/unit/v1/testdata/update-nested-transform-and-nested-value.json
@@ -31,18 +31,13 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-all-transforms.json b/tests/unit/v1/testdata/update-paths-all-transforms.json
index 01a4c1143d..40adbcaf56 100644
--- a/tests/unit/v1/testdata/update-paths-all-transforms.json
+++ b/tests/unit/v1/testdata/update-paths-all-transforms.json
@@ -52,50 +52,45 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- },
- {
- "fieldPath": "c",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "d",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ },
+ {
+ "fieldPath": "c",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ },
+ {
+ "fieldPath": "d",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
+ }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-arrayremove-alone.json b/tests/unit/v1/testdata/update-paths-arrayremove-alone.json
index 9bc8a14401..4097f58885 100644
--- a/tests/unit/v1/testdata/update-paths-arrayremove-alone.json
+++ b/tests/unit/v1/testdata/update-paths-arrayremove-alone.json
@@ -19,30 +19,34 @@
"database": "projects/projectID/databases/(default)",
"writes": [
{
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- }
- ]
- },
"currentDocument": {
"exists": true
- }
+ },
+ "update": {
+ "fields": {},
+ "name": "projects/projectID/databases/(default)/documents/C/d"
+ },
+ "updateMask": {
+ "fieldPaths": []
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
+ }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-arrayremove-multi.json b/tests/unit/v1/testdata/update-paths-arrayremove-multi.json
index 9a8547120e..5e76d07bac 100644
--- a/tests/unit/v1/testdata/update-paths-arrayremove-multi.json
+++ b/tests/unit/v1/testdata/update-paths-arrayremove-multi.json
@@ -47,46 +47,41 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "c.d",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ },
+ {
+ "fieldPath": "c.d",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
+ }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-arrayremove-nested.json b/tests/unit/v1/testdata/update-paths-arrayremove-nested.json
index e7f952ec34..9ee1b2a6fe 100644
--- a/tests/unit/v1/testdata/update-paths-arrayremove-nested.json
+++ b/tests/unit/v1/testdata/update-paths-arrayremove-nested.json
@@ -41,30 +41,25 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-arrayremove.json b/tests/unit/v1/testdata/update-paths-arrayremove.json
index 673a2ca2c1..a7be888daf 100644
--- a/tests/unit/v1/testdata/update-paths-arrayremove.json
+++ b/tests/unit/v1/testdata/update-paths-arrayremove.json
@@ -40,30 +40,25 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "removeAllFromArray": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "removeAllFromArray": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-arrayunion-alone.json b/tests/unit/v1/testdata/update-paths-arrayunion-alone.json
index 81e1e9771a..2375d0cedb 100644
--- a/tests/unit/v1/testdata/update-paths-arrayunion-alone.json
+++ b/tests/unit/v1/testdata/update-paths-arrayunion-alone.json
@@ -19,30 +19,34 @@
"database": "projects/projectID/databases/(default)",
"writes": [
{
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- }
- ]
- },
"currentDocument": {
"exists": true
- }
+ },
+ "update": {
+ "fields": {},
+ "name": "projects/projectID/databases/(default)/documents/C/d"
+ },
+ "updateMask": {
+ "fieldPaths": []
+ },
+ "updateTransforms": [
+ {
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
+ },
+ "fieldPath": "a"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-arrayunion-multi.json b/tests/unit/v1/testdata/update-paths-arrayunion-multi.json
index ef421bdad1..afb6437417 100644
--- a/tests/unit/v1/testdata/update-paths-arrayunion-multi.json
+++ b/tests/unit/v1/testdata/update-paths-arrayunion-multi.json
@@ -47,46 +47,41 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
- },
- {
- "fieldPath": "c.d",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "4"
- },
- {
- "integerValue": "5"
- },
- {
- "integerValue": "6"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ },
+ {
+ "fieldPath": "c.d",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "4"
+ },
+ {
+ "integerValue": "5"
+ },
+ {
+ "integerValue": "6"
+ }
+ ]
+ }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-arrayunion-nested.json b/tests/unit/v1/testdata/update-paths-arrayunion-nested.json
index 2d73527a40..d908d02055 100644
--- a/tests/unit/v1/testdata/update-paths-arrayunion-nested.json
+++ b/tests/unit/v1/testdata/update-paths-arrayunion-nested.json
@@ -41,30 +41,25 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-arrayunion.json b/tests/unit/v1/testdata/update-paths-arrayunion.json
index 1401993d05..ed2966aede 100644
--- a/tests/unit/v1/testdata/update-paths-arrayunion.json
+++ b/tests/unit/v1/testdata/update-paths-arrayunion.json
@@ -40,30 +40,25 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "appendMissingElements": {
- "values": [
- {
- "integerValue": "1"
- },
- {
- "integerValue": "2"
- },
- {
- "integerValue": "3"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "appendMissingElements": {
+ "values": [
+ {
+ "integerValue": "1"
+ },
+ {
+ "integerValue": "2"
+ },
+ {
+ "integerValue": "3"
+ }
+ ]
}
- ]
- }
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-nested-transform-and-nested-value.json b/tests/unit/v1/testdata/update-paths-nested-transform-and-nested-value.json
index 927d783aee..c4dead09e0 100644
--- a/tests/unit/v1/testdata/update-paths-nested-transform-and-nested-value.json
+++ b/tests/unit/v1/testdata/update-paths-nested-transform-and-nested-value.json
@@ -48,18 +48,13 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-st-alone.json b/tests/unit/v1/testdata/update-paths-st-alone.json
index 085d049877..668c1c932b 100644
--- a/tests/unit/v1/testdata/update-paths-st-alone.json
+++ b/tests/unit/v1/testdata/update-paths-st-alone.json
@@ -19,18 +19,22 @@
"database": "projects/projectID/databases/(default)",
"writes": [
{
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- },
"currentDocument": {
"exists": true
- }
+ },
+ "update": {
+ "fields": {},
+ "name": "projects/projectID/databases/(default)/documents/C/d"
+ },
+ "updateMask": {
+ "fieldPaths": []
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-st-multi.json b/tests/unit/v1/testdata/update-paths-st-multi.json
index 2d813801ac..8767cf3497 100644
--- a/tests/unit/v1/testdata/update-paths-st-multi.json
+++ b/tests/unit/v1/testdata/update-paths-st-multi.json
@@ -47,22 +47,17 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- },
- {
- "fieldPath": "c.d",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ },
+ {
+ "fieldPath": "c.d",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-st-nested.json b/tests/unit/v1/testdata/update-paths-st-nested.json
index 8bd35c9111..94ecaccaa4 100644
--- a/tests/unit/v1/testdata/update-paths-st-nested.json
+++ b/tests/unit/v1/testdata/update-paths-st-nested.json
@@ -41,18 +41,13 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-st-with-empty-map.json b/tests/unit/v1/testdata/update-paths-st-with-empty-map.json
index ac60b2771d..a86ae46cd1 100644
--- a/tests/unit/v1/testdata/update-paths-st-with-empty-map.json
+++ b/tests/unit/v1/testdata/update-paths-st-with-empty-map.json
@@ -40,20 +40,15 @@
"a"
]
},
+ "updateTransforms": [
+ {
+ "fieldPath": "a.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ],
"currentDocument": {
"exists": true
}
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
}
]
}
diff --git a/tests/unit/v1/testdata/update-paths-st.json b/tests/unit/v1/testdata/update-paths-st.json
index 011405b9bf..1710508b2d 100644
--- a/tests/unit/v1/testdata/update-paths-st.json
+++ b/tests/unit/v1/testdata/update-paths-st.json
@@ -40,18 +40,13 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-st-alone.json b/tests/unit/v1/testdata/update-st-alone.json
index 1a333f30cb..49fab17691 100644
--- a/tests/unit/v1/testdata/update-st-alone.json
+++ b/tests/unit/v1/testdata/update-st-alone.json
@@ -10,18 +10,22 @@
"database": "projects/projectID/databases/(default)",
"writes": [
{
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- },
"currentDocument": {
"exists": true
- }
+ },
+ "update": {
+ "fields": {},
+ "name": "projects/projectID/databases/(default)/documents/C/d"
+ },
+ "updateMask": {
+ "fieldPaths": []
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-st-dot.json b/tests/unit/v1/testdata/update-st-dot.json
index 83422ca527..8b9a769021 100644
--- a/tests/unit/v1/testdata/update-st-dot.json
+++ b/tests/unit/v1/testdata/update-st-dot.json
@@ -10,18 +10,22 @@
"database": "projects/projectID/databases/(default)",
"writes": [
{
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a.b.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- },
"currentDocument": {
"exists": true
- }
+ },
+ "update": {
+ "fields": {},
+ "name": "projects/projectID/databases/(default)/documents/C/d"
+ },
+ "updateMask": {
+ "fieldPaths": []
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a.b.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-st-multi.json b/tests/unit/v1/testdata/update-st-multi.json
index 8105ec27f5..f474112b63 100644
--- a/tests/unit/v1/testdata/update-st-multi.json
+++ b/tests/unit/v1/testdata/update-st-multi.json
@@ -26,22 +26,17 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- },
- {
- "fieldPath": "c.d",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ },
+ {
+ "fieldPath": "c.d",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-st-nested.json b/tests/unit/v1/testdata/update-st-nested.json
index 5a8e73237c..fa9f46b49f 100644
--- a/tests/unit/v1/testdata/update-st-nested.json
+++ b/tests/unit/v1/testdata/update-st-nested.json
@@ -26,18 +26,13 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-st-with-empty-map.json b/tests/unit/v1/testdata/update-st-with-empty-map.json
index abeceb03ea..4a2c27dfb0 100644
--- a/tests/unit/v1/testdata/update-st-with-empty-map.json
+++ b/tests/unit/v1/testdata/update-st-with-empty-map.json
@@ -33,18 +33,13 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "a.c",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "a.c",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1/testdata/update-st.json b/tests/unit/v1/testdata/update-st.json
index 6249d8bda9..71d17f3c7a 100644
--- a/tests/unit/v1/testdata/update-st.json
+++ b/tests/unit/v1/testdata/update-st.json
@@ -25,18 +25,13 @@
},
"currentDocument": {
"exists": true
- }
- },
- {
- "transform": {
- "document": "projects/projectID/databases/(default)/documents/C/d",
- "fieldTransforms": [
- {
- "fieldPath": "b",
- "setToServerValue": "REQUEST_TIME"
- }
- ]
- }
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b",
+ "setToServerValue": "REQUEST_TIME"
+ }
+ ]
}
]
}
diff --git a/tests/unit/v1beta1/test__helpers.py b/tests/unit/v1beta1/test__helpers.py
deleted file mode 100644
index 3059482cd0..0000000000
--- a/tests/unit/v1beta1/test__helpers.py
+++ /dev/null
@@ -1,2089 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import sys
-import unittest
-
-import mock
-import pytest
-
-
-class TestGeoPoint(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1._helpers import GeoPoint
-
- return GeoPoint
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- def test_constructor(self):
- lat = 81.25
- lng = 359.984375
- geo_pt = self._make_one(lat, lng)
- self.assertEqual(geo_pt.latitude, lat)
- self.assertEqual(geo_pt.longitude, lng)
-
- def test_to_protobuf(self):
- from google.type import latlng_pb2
-
- lat = 0.015625
- lng = 20.03125
- geo_pt = self._make_one(lat, lng)
- result = geo_pt.to_protobuf()
- geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
- self.assertEqual(result, geo_pt_pb)
-
- def test___eq__(self):
- lat = 0.015625
- lng = 20.03125
- geo_pt1 = self._make_one(lat, lng)
- geo_pt2 = self._make_one(lat, lng)
- self.assertEqual(geo_pt1, geo_pt2)
-
- def test___eq__type_differ(self):
- lat = 0.015625
- lng = 20.03125
- geo_pt1 = self._make_one(lat, lng)
- geo_pt2 = object()
- self.assertNotEqual(geo_pt1, geo_pt2)
- self.assertIs(geo_pt1.__eq__(geo_pt2), NotImplemented)
-
- def test___ne__same_value(self):
- lat = 0.015625
- lng = 20.03125
- geo_pt1 = self._make_one(lat, lng)
- geo_pt2 = self._make_one(lat, lng)
- comparison_val = geo_pt1 != geo_pt2
- self.assertFalse(comparison_val)
-
- def test___ne__(self):
- geo_pt1 = self._make_one(0.0, 1.0)
- geo_pt2 = self._make_one(2.0, 3.0)
- self.assertNotEqual(geo_pt1, geo_pt2)
-
- def test___ne__type_differ(self):
- lat = 0.015625
- lng = 20.03125
- geo_pt1 = self._make_one(lat, lng)
- geo_pt2 = object()
- self.assertNotEqual(geo_pt1, geo_pt2)
- self.assertIs(geo_pt1.__ne__(geo_pt2), NotImplemented)
-
-
-class Test_verify_path(unittest.TestCase):
- @staticmethod
- def _call_fut(path, is_collection):
- from google.cloud.firestore_v1beta1._helpers import verify_path
-
- return verify_path(path, is_collection)
-
- def test_empty(self):
- path = ()
- with self.assertRaises(ValueError):
- self._call_fut(path, True)
- with self.assertRaises(ValueError):
- self._call_fut(path, False)
-
- def test_wrong_length_collection(self):
- path = ("foo", "bar")
- with self.assertRaises(ValueError):
- self._call_fut(path, True)
-
- def test_wrong_length_document(self):
- path = ("Kind",)
- with self.assertRaises(ValueError):
- self._call_fut(path, False)
-
- def test_wrong_type_collection(self):
- path = (99, "ninety-nine", "zap")
- with self.assertRaises(ValueError):
- self._call_fut(path, True)
-
- def test_wrong_type_document(self):
- path = ("Users", "Ada", "Candy", {})
- with self.assertRaises(ValueError):
- self._call_fut(path, False)
-
- def test_success_collection(self):
- path = ("Computer", "Magic", "Win")
- ret_val = self._call_fut(path, True)
- # NOTE: We are just checking that it didn't fail.
- self.assertIsNone(ret_val)
-
- def test_success_document(self):
- path = ("Tokenizer", "Seventeen", "Cheese", "Burger")
- ret_val = self._call_fut(path, False)
- # NOTE: We are just checking that it didn't fail.
- self.assertIsNone(ret_val)
-
-
-class Test_encode_value(unittest.TestCase):
- @staticmethod
- def _call_fut(value):
- from google.cloud.firestore_v1beta1._helpers import encode_value
-
- return encode_value(value)
-
- def test_none(self):
- from google.protobuf import struct_pb2
-
- result = self._call_fut(None)
- expected = _value_pb(null_value=struct_pb2.NULL_VALUE)
- self.assertEqual(result, expected)
-
- def test_boolean(self):
- result = self._call_fut(True)
- expected = _value_pb(boolean_value=True)
- self.assertEqual(result, expected)
-
- def test_integer(self):
- value = 425178
- result = self._call_fut(value)
- expected = _value_pb(integer_value=value)
- self.assertEqual(result, expected)
-
- def test_float(self):
- value = 123.4453125
- result = self._call_fut(value)
- expected = _value_pb(double_value=value)
- self.assertEqual(result, expected)
-
- def test_datetime_with_nanos(self):
- from google.api_core.datetime_helpers import DatetimeWithNanoseconds
- from google.protobuf import timestamp_pb2
-
- dt_seconds = 1488768504
- dt_nanos = 458816991
- timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos)
- dt_val = DatetimeWithNanoseconds.from_timestamp_pb(timestamp_pb)
-
- result = self._call_fut(dt_val)
- expected = _value_pb(timestamp_value=timestamp_pb)
- self.assertEqual(result, expected)
-
- def test_datetime_wo_nanos(self):
- from google.protobuf import timestamp_pb2
-
- dt_seconds = 1488768504
- dt_nanos = 458816000
- # Make sure precision is valid in microseconds too.
- self.assertEqual(dt_nanos % 1000, 0)
- dt_val = datetime.datetime.utcfromtimestamp(dt_seconds + 1e-9 * dt_nanos)
-
- result = self._call_fut(dt_val)
- timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos)
- expected = _value_pb(timestamp_value=timestamp_pb)
- self.assertEqual(result, expected)
-
- def test_string(self):
- value = u"\u2018left quote, right quote\u2019"
- result = self._call_fut(value)
- expected = _value_pb(string_value=value)
- self.assertEqual(result, expected)
-
- def test_bytes(self):
- value = b"\xe3\xf2\xff\x00"
- result = self._call_fut(value)
- expected = _value_pb(bytes_value=value)
- self.assertEqual(result, expected)
-
- def test_reference_value(self):
- client = _make_client()
-
- value = client.document("my", "friend")
- result = self._call_fut(value)
- expected = _value_pb(reference_value=value._document_path)
- self.assertEqual(result, expected)
-
- def test_geo_point(self):
- from google.cloud.firestore_v1beta1._helpers import GeoPoint
-
- value = GeoPoint(50.5, 88.75)
- result = self._call_fut(value)
- expected = _value_pb(geo_point_value=value.to_protobuf())
- self.assertEqual(result, expected)
-
- def test_array(self):
- from google.cloud.firestore_v1beta1.proto.document_pb2 import ArrayValue
-
- result = self._call_fut([99, True, 118.5])
-
- array_pb = ArrayValue(
- values=[
- _value_pb(integer_value=99),
- _value_pb(boolean_value=True),
- _value_pb(double_value=118.5),
- ]
- )
- expected = _value_pb(array_value=array_pb)
- self.assertEqual(result, expected)
-
- def test_map(self):
- from google.cloud.firestore_v1beta1.proto.document_pb2 import MapValue
-
- result = self._call_fut({"abc": 285, "def": b"piglatin"})
-
- map_pb = MapValue(
- fields={
- "abc": _value_pb(integer_value=285),
- "def": _value_pb(bytes_value=b"piglatin"),
- }
- )
- expected = _value_pb(map_value=map_pb)
- self.assertEqual(result, expected)
-
- def test_bad_type(self):
- value = object()
- with self.assertRaises(TypeError):
- self._call_fut(value)
-
-
-class Test_encode_dict(unittest.TestCase):
- @staticmethod
- def _call_fut(values_dict):
- from google.cloud.firestore_v1beta1._helpers import encode_dict
-
- return encode_dict(values_dict)
-
- def test_many_types(self):
- from google.protobuf import struct_pb2
- from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1beta1.proto.document_pb2 import ArrayValue
- from google.cloud.firestore_v1beta1.proto.document_pb2 import MapValue
-
- dt_seconds = 1497397225
- dt_nanos = 465964000
- # Make sure precision is valid in microseconds too.
- self.assertEqual(dt_nanos % 1000, 0)
- dt_val = datetime.datetime.utcfromtimestamp(dt_seconds + 1e-9 * dt_nanos)
-
- client = _make_client()
- document = client.document("most", "adjective", "thing", "here")
-
- values_dict = {
- "foo": None,
- "bar": True,
- "baz": 981,
- "quux": 2.875,
- "quuz": dt_val,
- "corge": u"\N{snowman}",
- "grault": b"\xe2\x98\x83",
- "wibble": document,
- "garply": [u"fork", 4.0],
- "waldo": {"fred": u"zap", "thud": False},
- }
- encoded_dict = self._call_fut(values_dict)
- expected_dict = {
- "foo": _value_pb(null_value=struct_pb2.NULL_VALUE),
- "bar": _value_pb(boolean_value=True),
- "baz": _value_pb(integer_value=981),
- "quux": _value_pb(double_value=2.875),
- "quuz": _value_pb(
- timestamp_value=timestamp_pb2.Timestamp(
- seconds=dt_seconds, nanos=dt_nanos
- )
- ),
- "corge": _value_pb(string_value=u"\N{snowman}"),
- "grault": _value_pb(bytes_value=b"\xe2\x98\x83"),
- "wibble": _value_pb(reference_value=document._document_path),
- "garply": _value_pb(
- array_value=ArrayValue(
- values=[
- _value_pb(string_value=u"fork"),
- _value_pb(double_value=4.0),
- ]
- )
- ),
- "waldo": _value_pb(
- map_value=MapValue(
- fields={
- "fred": _value_pb(string_value=u"zap"),
- "thud": _value_pb(boolean_value=False),
- }
- )
- ),
- }
- self.assertEqual(encoded_dict, expected_dict)
-
-
-class Test_reference_value_to_document(unittest.TestCase):
- @staticmethod
- def _call_fut(reference_value, client):
- from google.cloud.firestore_v1beta1._helpers import reference_value_to_document
-
- return reference_value_to_document(reference_value, client)
-
- def test_bad_format(self):
- from google.cloud.firestore_v1beta1._helpers import BAD_REFERENCE_ERROR
-
- reference_value = "not/the/right/format"
- with self.assertRaises(ValueError) as exc_info:
- self._call_fut(reference_value, None)
-
- err_msg = BAD_REFERENCE_ERROR.format(reference_value)
- self.assertEqual(exc_info.exception.args, (err_msg,))
-
- def test_same_client(self):
- from google.cloud.firestore_v1beta1.document import DocumentReference
-
- client = _make_client()
- document = client.document("that", "this")
- reference_value = document._document_path
-
- new_document = self._call_fut(reference_value, client)
- self.assertIsNot(new_document, document)
-
- self.assertIsInstance(new_document, DocumentReference)
- self.assertIs(new_document._client, client)
- self.assertEqual(new_document._path, document._path)
-
- def test_different_client(self):
- from google.cloud.firestore_v1beta1._helpers import WRONG_APP_REFERENCE
-
- client1 = _make_client(project="kirk")
- document = client1.document("tin", "foil")
- reference_value = document._document_path
-
- client2 = _make_client(project="spock")
- with self.assertRaises(ValueError) as exc_info:
- self._call_fut(reference_value, client2)
-
- err_msg = WRONG_APP_REFERENCE.format(reference_value, client2._database_string)
- self.assertEqual(exc_info.exception.args, (err_msg,))
-
-
-class Test_decode_value(unittest.TestCase):
- @staticmethod
- def _call_fut(value, client=mock.sentinel.client):
- from google.cloud.firestore_v1beta1._helpers import decode_value
-
- return decode_value(value, client)
-
- def test_none(self):
- from google.protobuf import struct_pb2
-
- value = _value_pb(null_value=struct_pb2.NULL_VALUE)
- self.assertIsNone(self._call_fut(value))
-
- def test_bool(self):
- value1 = _value_pb(boolean_value=True)
- self.assertTrue(self._call_fut(value1))
- value2 = _value_pb(boolean_value=False)
- self.assertFalse(self._call_fut(value2))
-
- def test_int(self):
- int_val = 29871
- value = _value_pb(integer_value=int_val)
- self.assertEqual(self._call_fut(value), int_val)
-
- def test_float(self):
- float_val = 85.9296875
- value = _value_pb(double_value=float_val)
- self.assertEqual(self._call_fut(value), float_val)
-
- @unittest.skipIf(
- (3,) <= sys.version_info < (3, 4, 4), "known datetime bug (bpo-23517) in Python"
- )
- def test_datetime(self):
- from google.api_core.datetime_helpers import DatetimeWithNanoseconds
- from google.protobuf import timestamp_pb2
-
- dt_seconds = 552855006
- dt_nanos = 766961828
-
- timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos)
- value = _value_pb(timestamp_value=timestamp_pb)
-
- expected_dt_val = DatetimeWithNanoseconds.from_timestamp_pb(timestamp_pb)
- self.assertEqual(self._call_fut(value), expected_dt_val)
-
- def test_unicode(self):
- unicode_val = u"zorgon"
- value = _value_pb(string_value=unicode_val)
- self.assertEqual(self._call_fut(value), unicode_val)
-
- def test_bytes(self):
- bytes_val = b"abc\x80"
- value = _value_pb(bytes_value=bytes_val)
- self.assertEqual(self._call_fut(value), bytes_val)
-
- def test_reference(self):
- from google.cloud.firestore_v1beta1.document import DocumentReference
-
- client = _make_client()
- path = (u"then", u"there-was-one")
- document = client.document(*path)
- ref_string = document._document_path
- value = _value_pb(reference_value=ref_string)
-
- result = self._call_fut(value, client)
- self.assertIsInstance(result, DocumentReference)
- self.assertIs(result._client, client)
- self.assertEqual(result._path, path)
-
- def test_geo_point(self):
- from google.cloud.firestore_v1beta1._helpers import GeoPoint
-
- geo_pt = GeoPoint(latitude=42.5, longitude=99.0625)
- value = _value_pb(geo_point_value=geo_pt.to_protobuf())
- self.assertEqual(self._call_fut(value), geo_pt)
-
- def test_array(self):
- from google.cloud.firestore_v1beta1.proto import document_pb2
-
- sub_value1 = _value_pb(boolean_value=True)
- sub_value2 = _value_pb(double_value=14.1396484375)
- sub_value3 = _value_pb(bytes_value=b"\xde\xad\xbe\xef")
- array_pb = document_pb2.ArrayValue(values=[sub_value1, sub_value2, sub_value3])
- value = _value_pb(array_value=array_pb)
-
- expected = [
- sub_value1.boolean_value,
- sub_value2.double_value,
- sub_value3.bytes_value,
- ]
- self.assertEqual(self._call_fut(value), expected)
-
- def test_map(self):
- from google.cloud.firestore_v1beta1.proto import document_pb2
-
- sub_value1 = _value_pb(integer_value=187680)
- sub_value2 = _value_pb(string_value=u"how low can you go?")
- map_pb = document_pb2.MapValue(
- fields={"first": sub_value1, "second": sub_value2}
- )
- value = _value_pb(map_value=map_pb)
-
- expected = {
- "first": sub_value1.integer_value,
- "second": sub_value2.string_value,
- }
- self.assertEqual(self._call_fut(value), expected)
-
- def test_nested_map(self):
- from google.cloud.firestore_v1beta1.proto import document_pb2
-
- actual_value1 = 1009876
- actual_value2 = u"hey you guys"
- actual_value3 = 90.875
- map_pb1 = document_pb2.MapValue(
- fields={
- "lowest": _value_pb(integer_value=actual_value1),
- "aside": _value_pb(string_value=actual_value2),
- }
- )
- map_pb2 = document_pb2.MapValue(
- fields={
- "middle": _value_pb(map_value=map_pb1),
- "aside": _value_pb(boolean_value=True),
- }
- )
- map_pb3 = document_pb2.MapValue(
- fields={
- "highest": _value_pb(map_value=map_pb2),
- "aside": _value_pb(double_value=actual_value3),
- }
- )
- value = _value_pb(map_value=map_pb3)
-
- expected = {
- "highest": {
- "middle": {"lowest": actual_value1, "aside": actual_value2},
- "aside": True,
- },
- "aside": actual_value3,
- }
- self.assertEqual(self._call_fut(value), expected)
-
- def test_unset_value_type(self):
- with self.assertRaises(ValueError):
- self._call_fut(_value_pb())
-
- def test_unknown_value_type(self):
- value_pb = mock.Mock(spec=["WhichOneof"])
- value_pb.WhichOneof.return_value = "zoob_value"
-
- with self.assertRaises(ValueError):
- self._call_fut(value_pb)
-
- value_pb.WhichOneof.assert_called_once_with("value_type")
-
-
-class Test_decode_dict(unittest.TestCase):
- @staticmethod
- def _call_fut(value_fields, client=mock.sentinel.client):
- from google.cloud.firestore_v1beta1._helpers import decode_dict
-
- return decode_dict(value_fields, client)
-
- @unittest.skipIf(
- (3,) <= sys.version_info < (3, 4, 4), "known datetime bug (bpo-23517) in Python"
- )
- def test_many_types(self):
- from google.protobuf import struct_pb2
- from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1beta1.proto.document_pb2 import ArrayValue
- from google.cloud.firestore_v1beta1.proto.document_pb2 import MapValue
- from google.cloud._helpers import UTC
- from google.cloud.firestore_v1beta1.field_path import FieldPath
-
- dt_seconds = 1394037350
- dt_nanos = 667285000
- # Make sure precision is valid in microseconds too.
- self.assertEqual(dt_nanos % 1000, 0)
- dt_val = datetime.datetime.utcfromtimestamp(
- dt_seconds + 1e-9 * dt_nanos
- ).replace(tzinfo=UTC)
-
- value_fields = {
- "foo": _value_pb(null_value=struct_pb2.NULL_VALUE),
- "bar": _value_pb(boolean_value=True),
- "baz": _value_pb(integer_value=981),
- "quux": _value_pb(double_value=2.875),
- "quuz": _value_pb(
- timestamp_value=timestamp_pb2.Timestamp(
- seconds=dt_seconds, nanos=dt_nanos
- )
- ),
- "corge": _value_pb(string_value=u"\N{snowman}"),
- "grault": _value_pb(bytes_value=b"\xe2\x98\x83"),
- "garply": _value_pb(
- array_value=ArrayValue(
- values=[
- _value_pb(string_value=u"fork"),
- _value_pb(double_value=4.0),
- ]
- )
- ),
- "waldo": _value_pb(
- map_value=MapValue(
- fields={
- "fred": _value_pb(string_value=u"zap"),
- "thud": _value_pb(boolean_value=False),
- }
- )
- ),
- FieldPath("a", "b", "c").to_api_repr(): _value_pb(boolean_value=False),
- }
- expected = {
- "foo": None,
- "bar": True,
- "baz": 981,
- "quux": 2.875,
- "quuz": dt_val,
- "corge": u"\N{snowman}",
- "grault": b"\xe2\x98\x83",
- "garply": [u"fork", 4.0],
- "waldo": {"fred": u"zap", "thud": False},
- "a.b.c": False,
- }
- self.assertEqual(self._call_fut(value_fields), expected)
-
-
-class Test_get_doc_id(unittest.TestCase):
- @staticmethod
- def _call_fut(document_pb, expected_prefix):
- from google.cloud.firestore_v1beta1._helpers import get_doc_id
-
- return get_doc_id(document_pb, expected_prefix)
-
- @staticmethod
- def _dummy_ref_string(collection_id):
- from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE
-
- project = u"bazzzz"
- return u"projects/{}/databases/{}/documents/{}".format(
- project, DEFAULT_DATABASE, collection_id
- )
-
- def test_success(self):
- from google.cloud.firestore_v1beta1.proto import document_pb2
-
- prefix = self._dummy_ref_string("sub-collection")
- actual_id = "this-is-the-one"
- name = "{}/{}".format(prefix, actual_id)
-
- document_pb = document_pb2.Document(name=name)
- document_id = self._call_fut(document_pb, prefix)
- self.assertEqual(document_id, actual_id)
-
- def test_failure(self):
- from google.cloud.firestore_v1beta1.proto import document_pb2
-
- actual_prefix = self._dummy_ref_string("the-right-one")
- wrong_prefix = self._dummy_ref_string("the-wrong-one")
- name = "{}/{}".format(actual_prefix, "sorry-wont-works")
-
- document_pb = document_pb2.Document(name=name)
- with self.assertRaises(ValueError) as exc_info:
- self._call_fut(document_pb, wrong_prefix)
-
- exc_args = exc_info.exception.args
- self.assertEqual(len(exc_args), 4)
- self.assertEqual(exc_args[1], name)
- self.assertEqual(exc_args[3], wrong_prefix)
-
-
-class Test_extract_fields(unittest.TestCase):
- @staticmethod
- def _call_fut(document_data, prefix_path, expand_dots=False):
- from google.cloud.firestore_v1beta1 import _helpers
-
- return _helpers.extract_fields(
- document_data, prefix_path, expand_dots=expand_dots
- )
-
- def test_w_empty_document(self):
- from google.cloud.firestore_v1beta1._helpers import _EmptyDict
-
- document_data = {}
- prefix_path = _make_field_path()
- expected = [(_make_field_path(), _EmptyDict)]
-
- iterator = self._call_fut(document_data, prefix_path)
- self.assertEqual(list(iterator), expected)
-
- def test_w_invalid_key_and_expand_dots(self):
- document_data = {"b": 1, "a~d": 2, "c": 3}
- prefix_path = _make_field_path()
-
- with self.assertRaises(ValueError):
- list(self._call_fut(document_data, prefix_path, expand_dots=True))
-
- def test_w_shallow_keys(self):
- document_data = {"b": 1, "a": 2, "c": 3}
- prefix_path = _make_field_path()
- expected = [
- (_make_field_path("a"), 2),
- (_make_field_path("b"), 1),
- (_make_field_path("c"), 3),
- ]
-
- iterator = self._call_fut(document_data, prefix_path)
- self.assertEqual(list(iterator), expected)
-
- def test_w_nested(self):
- from google.cloud.firestore_v1beta1._helpers import _EmptyDict
-
- document_data = {"b": {"a": {"d": 4, "c": 3, "g": {}}, "e": 7}, "f": 5}
- prefix_path = _make_field_path()
- expected = [
- (_make_field_path("b", "a", "c"), 3),
- (_make_field_path("b", "a", "d"), 4),
- (_make_field_path("b", "a", "g"), _EmptyDict),
- (_make_field_path("b", "e"), 7),
- (_make_field_path("f"), 5),
- ]
-
- iterator = self._call_fut(document_data, prefix_path)
- self.assertEqual(list(iterator), expected)
-
- def test_w_expand_dotted(self):
- from google.cloud.firestore_v1beta1._helpers import _EmptyDict
-
- document_data = {
- "b": {"a": {"d": 4, "c": 3, "g": {}, "k.l.m": 17}, "e": 7},
- "f": 5,
- "h.i.j": 9,
- }
- prefix_path = _make_field_path()
- expected = [
- (_make_field_path("b", "a", "c"), 3),
- (_make_field_path("b", "a", "d"), 4),
- (_make_field_path("b", "a", "g"), _EmptyDict),
- (_make_field_path("b", "a", "k.l.m"), 17),
- (_make_field_path("b", "e"), 7),
- (_make_field_path("f"), 5),
- (_make_field_path("h", "i", "j"), 9),
- ]
-
- iterator = self._call_fut(document_data, prefix_path, expand_dots=True)
- self.assertEqual(list(iterator), expected)
-
-
-class Test_set_field_value(unittest.TestCase):
- @staticmethod
- def _call_fut(document_data, field_path, value):
- from google.cloud.firestore_v1beta1 import _helpers
-
- return _helpers.set_field_value(document_data, field_path, value)
-
- def test_normal_value_w_shallow(self):
- document = {}
- field_path = _make_field_path("a")
- value = 3
-
- self._call_fut(document, field_path, value)
-
- self.assertEqual(document, {"a": 3})
-
- def test_normal_value_w_nested(self):
- document = {}
- field_path = _make_field_path("a", "b", "c")
- value = 3
-
- self._call_fut(document, field_path, value)
-
- self.assertEqual(document, {"a": {"b": {"c": 3}}})
-
- def test_empty_dict_w_shallow(self):
- from google.cloud.firestore_v1beta1._helpers import _EmptyDict
-
- document = {}
- field_path = _make_field_path("a")
- value = _EmptyDict
-
- self._call_fut(document, field_path, value)
-
- self.assertEqual(document, {"a": {}})
-
- def test_empty_dict_w_nested(self):
- from google.cloud.firestore_v1beta1._helpers import _EmptyDict
-
- document = {}
- field_path = _make_field_path("a", "b", "c")
- value = _EmptyDict
-
- self._call_fut(document, field_path, value)
-
- self.assertEqual(document, {"a": {"b": {"c": {}}}})
-
-
-class Test_get_field_value(unittest.TestCase):
- @staticmethod
- def _call_fut(document_data, field_path):
- from google.cloud.firestore_v1beta1 import _helpers
-
- return _helpers.get_field_value(document_data, field_path)
-
- def test_w_empty_path(self):
- document = {}
-
- with self.assertRaises(ValueError):
- self._call_fut(document, _make_field_path())
-
- def test_miss_shallow(self):
- document = {}
-
- with self.assertRaises(KeyError):
- self._call_fut(document, _make_field_path("nonesuch"))
-
- def test_miss_nested(self):
- document = {"a": {"b": {}}}
-
- with self.assertRaises(KeyError):
- self._call_fut(document, _make_field_path("a", "b", "c"))
-
- def test_hit_shallow(self):
- document = {"a": 1}
-
- self.assertEqual(self._call_fut(document, _make_field_path("a")), 1)
-
- def test_hit_nested(self):
- document = {"a": {"b": {"c": 1}}}
-
- self.assertEqual(self._call_fut(document, _make_field_path("a", "b", "c")), 1)
-
-
-class TestDocumentExtractor(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1 import _helpers
-
- return _helpers.DocumentExtractor
-
- def _make_one(self, document_data):
- return self._get_target_class()(document_data)
-
- def test_ctor_w_empty_document(self):
- document_data = {}
-
- inst = self._make_one(document_data)
-
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, [])
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, {})
- self.assertTrue(inst.empty_document)
- self.assertFalse(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [])
-
- def test_ctor_w_delete_field_shallow(self):
- from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD
-
- document_data = {"a": DELETE_FIELD}
-
- inst = self._make_one(document_data)
-
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, [])
- self.assertEqual(inst.deleted_fields, [_make_field_path("a")])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, {})
- self.assertFalse(inst.empty_document)
- self.assertFalse(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [])
-
- def test_ctor_w_delete_field_nested(self):
- from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD
-
- document_data = {"a": {"b": {"c": DELETE_FIELD}}}
-
- inst = self._make_one(document_data)
-
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, [])
- self.assertEqual(inst.deleted_fields, [_make_field_path("a", "b", "c")])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, {})
- self.assertFalse(inst.empty_document)
- self.assertFalse(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [])
-
- def test_ctor_w_server_timestamp_shallow(self):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_data = {"a": SERVER_TIMESTAMP}
-
- inst = self._make_one(document_data)
-
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, [])
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [_make_field_path("a")])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, {})
- self.assertFalse(inst.empty_document)
- self.assertTrue(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [_make_field_path("a")])
-
- def test_ctor_w_server_timestamp_nested(self):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_data = {"a": {"b": {"c": SERVER_TIMESTAMP}}}
-
- inst = self._make_one(document_data)
-
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, [])
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [_make_field_path("a", "b", "c")])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, {})
- self.assertFalse(inst.empty_document)
- self.assertTrue(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")])
-
- def test_ctor_w_array_remove_shallow(self):
- from google.cloud.firestore_v1beta1.transforms import ArrayRemove
-
- values = [1, 3, 5]
- document_data = {"a": ArrayRemove(values)}
-
- inst = self._make_one(document_data)
-
- expected_array_removes = {_make_field_path("a"): values}
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, [])
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, expected_array_removes)
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, {})
- self.assertFalse(inst.empty_document)
- self.assertTrue(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [_make_field_path("a")])
-
- def test_ctor_w_array_remove_nested(self):
- from google.cloud.firestore_v1beta1.transforms import ArrayRemove
-
- values = [2, 4, 8]
- document_data = {"a": {"b": {"c": ArrayRemove(values)}}}
-
- inst = self._make_one(document_data)
-
- expected_array_removes = {_make_field_path("a", "b", "c"): values}
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, [])
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, expected_array_removes)
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, {})
- self.assertFalse(inst.empty_document)
- self.assertTrue(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")])
-
- def test_ctor_w_array_union_shallow(self):
- from google.cloud.firestore_v1beta1.transforms import ArrayUnion
-
- values = [1, 3, 5]
- document_data = {"a": ArrayUnion(values)}
-
- inst = self._make_one(document_data)
-
- expected_array_unions = {_make_field_path("a"): values}
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, [])
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, expected_array_unions)
- self.assertEqual(inst.set_fields, {})
- self.assertFalse(inst.empty_document)
- self.assertTrue(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [_make_field_path("a")])
-
- def test_ctor_w_array_union_nested(self):
- from google.cloud.firestore_v1beta1.transforms import ArrayUnion
-
- values = [2, 4, 8]
- document_data = {"a": {"b": {"c": ArrayUnion(values)}}}
-
- inst = self._make_one(document_data)
-
- expected_array_unions = {_make_field_path("a", "b", "c"): values}
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, [])
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, expected_array_unions)
- self.assertEqual(inst.set_fields, {})
- self.assertFalse(inst.empty_document)
- self.assertTrue(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")])
-
- def test_ctor_w_empty_dict_shallow(self):
- document_data = {"a": {}}
-
- inst = self._make_one(document_data)
-
- expected_field_paths = [_make_field_path("a")]
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, expected_field_paths)
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, document_data)
- self.assertFalse(inst.empty_document)
- self.assertFalse(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [])
-
- def test_ctor_w_empty_dict_nested(self):
- document_data = {"a": {"b": {"c": {}}}}
-
- inst = self._make_one(document_data)
-
- expected_field_paths = [_make_field_path("a", "b", "c")]
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, expected_field_paths)
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, document_data)
- self.assertFalse(inst.empty_document)
- self.assertFalse(inst.has_transforms)
- self.assertEqual(inst.transform_paths, [])
-
- def test_ctor_w_normal_value_shallow(self):
- document_data = {"b": 1, "a": 2, "c": 3}
-
- inst = self._make_one(document_data)
-
- expected_field_paths = [
- _make_field_path("a"),
- _make_field_path("b"),
- _make_field_path("c"),
- ]
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, expected_field_paths)
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, document_data)
- self.assertFalse(inst.empty_document)
- self.assertFalse(inst.has_transforms)
-
- def test_ctor_w_normal_value_nested(self):
- document_data = {"b": {"a": {"d": 4, "c": 3}, "e": 7}, "f": 5}
-
- inst = self._make_one(document_data)
-
- expected_field_paths = [
- _make_field_path("b", "a", "c"),
- _make_field_path("b", "a", "d"),
- _make_field_path("b", "e"),
- _make_field_path("f"),
- ]
- self.assertEqual(inst.document_data, document_data)
- self.assertEqual(inst.field_paths, expected_field_paths)
- self.assertEqual(inst.deleted_fields, [])
- self.assertEqual(inst.server_timestamps, [])
- self.assertEqual(inst.array_removes, {})
- self.assertEqual(inst.array_unions, {})
- self.assertEqual(inst.set_fields, document_data)
- self.assertFalse(inst.empty_document)
- self.assertFalse(inst.has_transforms)
-
- def test_get_update_pb_w_exists_precondition(self):
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- document_data = {}
- inst = self._make_one(document_data)
- document_path = (
- "projects/project-id/databases/(default)/" "documents/document-id"
- )
-
- update_pb = inst.get_update_pb(document_path, exists=False)
-
- self.assertIsInstance(update_pb, write_pb2.Write)
- self.assertEqual(update_pb.update.name, document_path)
- self.assertEqual(update_pb.update.fields, document_data)
- self.assertTrue(update_pb.HasField("current_document"))
- self.assertFalse(update_pb.current_document.exists)
-
- def test_get_update_pb_wo_exists_precondition(self):
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1._helpers import encode_dict
-
- document_data = {"a": 1}
- inst = self._make_one(document_data)
- document_path = (
- "projects/project-id/databases/(default)/" "documents/document-id"
- )
-
- update_pb = inst.get_update_pb(document_path)
-
- self.assertIsInstance(update_pb, write_pb2.Write)
- self.assertEqual(update_pb.update.name, document_path)
- self.assertEqual(update_pb.update.fields, encode_dict(document_data))
- self.assertFalse(update_pb.HasField("current_document"))
-
- def test_get_transform_pb_w_server_timestamp_w_exists_precondition(self):
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
- from google.cloud.firestore_v1beta1._helpers import REQUEST_TIME_ENUM
-
- document_data = {"a": SERVER_TIMESTAMP}
- inst = self._make_one(document_data)
- document_path = (
- "projects/project-id/databases/(default)/" "documents/document-id"
- )
-
- transform_pb = inst.get_transform_pb(document_path, exists=False)
-
- self.assertIsInstance(transform_pb, write_pb2.Write)
- self.assertEqual(transform_pb.transform.document, document_path)
- transforms = transform_pb.transform.field_transforms
- self.assertEqual(len(transforms), 1)
- transform = transforms[0]
- self.assertEqual(transform.field_path, "a")
- self.assertEqual(transform.set_to_server_value, REQUEST_TIME_ENUM)
- self.assertTrue(transform_pb.HasField("current_document"))
- self.assertFalse(transform_pb.current_document.exists)
-
- def test_get_transform_pb_w_server_timestamp_wo_exists_precondition(self):
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
- from google.cloud.firestore_v1beta1._helpers import REQUEST_TIME_ENUM
-
- document_data = {"a": {"b": {"c": SERVER_TIMESTAMP}}}
- inst = self._make_one(document_data)
- document_path = (
- "projects/project-id/databases/(default)/" "documents/document-id"
- )
-
- transform_pb = inst.get_transform_pb(document_path)
-
- self.assertIsInstance(transform_pb, write_pb2.Write)
- self.assertEqual(transform_pb.transform.document, document_path)
- transforms = transform_pb.transform.field_transforms
- self.assertEqual(len(transforms), 1)
- transform = transforms[0]
- self.assertEqual(transform.field_path, "a.b.c")
- self.assertEqual(transform.set_to_server_value, REQUEST_TIME_ENUM)
- self.assertFalse(transform_pb.HasField("current_document"))
-
- @staticmethod
- def _array_value_to_list(array_value):
- from google.cloud.firestore_v1beta1._helpers import decode_value
-
- return [decode_value(element, client=None) for element in array_value.values]
-
- def test_get_transform_pb_w_array_remove(self):
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1.transforms import ArrayRemove
-
- values = [2, 4, 8]
- document_data = {"a": {"b": {"c": ArrayRemove(values)}}}
- inst = self._make_one(document_data)
- document_path = (
- "projects/project-id/databases/(default)/" "documents/document-id"
- )
-
- transform_pb = inst.get_transform_pb(document_path)
-
- self.assertIsInstance(transform_pb, write_pb2.Write)
- self.assertEqual(transform_pb.transform.document, document_path)
- transforms = transform_pb.transform.field_transforms
- self.assertEqual(len(transforms), 1)
- transform = transforms[0]
- self.assertEqual(transform.field_path, "a.b.c")
- removed = self._array_value_to_list(transform.remove_all_from_array)
- self.assertEqual(removed, values)
- self.assertFalse(transform_pb.HasField("current_document"))
-
- def test_get_transform_pb_w_array_union(self):
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1.transforms import ArrayUnion
-
- values = [1, 3, 5]
- document_data = {"a": {"b": {"c": ArrayUnion(values)}}}
- inst = self._make_one(document_data)
- document_path = (
- "projects/project-id/databases/(default)/" "documents/document-id"
- )
-
- transform_pb = inst.get_transform_pb(document_path)
-
- self.assertIsInstance(transform_pb, write_pb2.Write)
- self.assertEqual(transform_pb.transform.document, document_path)
- transforms = transform_pb.transform.field_transforms
- self.assertEqual(len(transforms), 1)
- transform = transforms[0]
- self.assertEqual(transform.field_path, "a.b.c")
- added = self._array_value_to_list(transform.append_missing_elements)
- self.assertEqual(added, values)
- self.assertFalse(transform_pb.HasField("current_document"))
-
-
-class Test_pbs_for_create(unittest.TestCase):
- @staticmethod
- def _call_fut(document_path, document_data):
- from google.cloud.firestore_v1beta1._helpers import pbs_for_create
-
- return pbs_for_create(document_path, document_data)
-
- @staticmethod
- def _make_write_w_document(document_path, **data):
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1._helpers import encode_dict
- from google.cloud.firestore_v1beta1.proto import common_pb2
-
- return write_pb2.Write(
- update=document_pb2.Document(name=document_path, fields=encode_dict(data)),
- current_document=common_pb2.Precondition(exists=False),
- )
-
- @staticmethod
- def _make_write_w_transform(document_path, fields):
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1.gapic import enums
-
- server_val = enums.DocumentTransform.FieldTransform.ServerValue
- transforms = [
- write_pb2.DocumentTransform.FieldTransform(
- field_path=field, set_to_server_value=server_val.REQUEST_TIME
- )
- for field in fields
- ]
-
- return write_pb2.Write(
- transform=write_pb2.DocumentTransform(
- document=document_path, field_transforms=transforms
- )
- )
-
- def _helper(self, do_transform=False, empty_val=False):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
- document_data = {"cheese": 1.5, "crackers": True}
-
- if do_transform:
- document_data["butter"] = SERVER_TIMESTAMP
-
- if empty_val:
- document_data["mustard"] = {}
-
- write_pbs = self._call_fut(document_path, document_data)
-
- if empty_val:
- update_pb = self._make_write_w_document(
- document_path, cheese=1.5, crackers=True, mustard={}
- )
- else:
- update_pb = self._make_write_w_document(
- document_path, cheese=1.5, crackers=True
- )
- expected_pbs = [update_pb]
-
- if do_transform:
- expected_pbs.append(
- self._make_write_w_transform(document_path, fields=["butter"])
- )
-
- self.assertEqual(write_pbs, expected_pbs)
-
- def test_without_transform(self):
- self._helper()
-
- def test_w_transform(self):
- self._helper(do_transform=True)
-
- def test_w_transform_and_empty_value(self):
- self._helper(do_transform=True, empty_val=True)
-
-
-class Test_pbs_for_set_no_merge(unittest.TestCase):
- @staticmethod
- def _call_fut(document_path, document_data):
- from google.cloud.firestore_v1beta1 import _helpers
-
- return _helpers.pbs_for_set_no_merge(document_path, document_data)
-
- @staticmethod
- def _make_write_w_document(document_path, **data):
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1._helpers import encode_dict
-
- return write_pb2.Write(
- update=document_pb2.Document(name=document_path, fields=encode_dict(data))
- )
-
- @staticmethod
- def _make_write_w_transform(document_path, fields):
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1.gapic import enums
-
- server_val = enums.DocumentTransform.FieldTransform.ServerValue
- transforms = [
- write_pb2.DocumentTransform.FieldTransform(
- field_path=field, set_to_server_value=server_val.REQUEST_TIME
- )
- for field in fields
- ]
-
- return write_pb2.Write(
- transform=write_pb2.DocumentTransform(
- document=document_path, field_transforms=transforms
- )
- )
-
- def test_w_empty_document(self):
- document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
- document_data = {}
-
- write_pbs = self._call_fut(document_path, document_data)
-
- update_pb = self._make_write_w_document(document_path)
- expected_pbs = [update_pb]
- self.assertEqual(write_pbs, expected_pbs)
-
- def test_w_only_server_timestamp(self):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
- document_data = {"butter": SERVER_TIMESTAMP}
-
- write_pbs = self._call_fut(document_path, document_data)
-
- update_pb = self._make_write_w_document(document_path)
- transform_pb = self._make_write_w_transform(document_path, ["butter"])
- expected_pbs = [update_pb, transform_pb]
- self.assertEqual(write_pbs, expected_pbs)
-
- def _helper(self, do_transform=False, empty_val=False):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
- document_data = {"cheese": 1.5, "crackers": True}
-
- if do_transform:
- document_data["butter"] = SERVER_TIMESTAMP
-
- if empty_val:
- document_data["mustard"] = {}
-
- write_pbs = self._call_fut(document_path, document_data)
-
- if empty_val:
- update_pb = self._make_write_w_document(
- document_path, cheese=1.5, crackers=True, mustard={}
- )
- else:
- update_pb = self._make_write_w_document(
- document_path, cheese=1.5, crackers=True
- )
- expected_pbs = [update_pb]
-
- if do_transform:
- expected_pbs.append(
- self._make_write_w_transform(document_path, fields=["butter"])
- )
-
- self.assertEqual(write_pbs, expected_pbs)
-
- def test_defaults(self):
- self._helper()
-
- def test_w_transform(self):
- self._helper(do_transform=True)
-
- def test_w_transform_and_empty_value(self):
- # Exercise #5944
- self._helper(do_transform=True, empty_val=True)
-
-
-class TestDocumentExtractorForMerge(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1 import _helpers
-
- return _helpers.DocumentExtractorForMerge
-
- def _make_one(self, document_data):
- return self._get_target_class()(document_data)
-
- def test_ctor_w_empty_document(self):
- document_data = {}
-
- inst = self._make_one(document_data)
-
- self.assertEqual(inst.data_merge, [])
- self.assertEqual(inst.transform_merge, [])
- self.assertEqual(inst.merge, [])
-
- def test_apply_merge_all_w_empty_document(self):
- document_data = {}
- inst = self._make_one(document_data)
-
- inst.apply_merge(True)
-
- self.assertEqual(inst.data_merge, [])
- self.assertEqual(inst.transform_merge, [])
- self.assertEqual(inst.merge, [])
- self.assertFalse(inst.has_updates)
-
- def test_apply_merge_all_w_delete(self):
- from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD
-
- document_data = {"write_me": "value", "delete_me": DELETE_FIELD}
- inst = self._make_one(document_data)
-
- inst.apply_merge(True)
-
- expected_data_merge = [
- _make_field_path("delete_me"),
- _make_field_path("write_me"),
- ]
- self.assertEqual(inst.data_merge, expected_data_merge)
- self.assertEqual(inst.transform_merge, [])
- self.assertEqual(inst.merge, expected_data_merge)
- self.assertTrue(inst.has_updates)
-
- def test_apply_merge_all_w_server_timestamp(self):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_data = {"write_me": "value", "timestamp": SERVER_TIMESTAMP}
- inst = self._make_one(document_data)
-
- inst.apply_merge(True)
-
- expected_data_merge = [_make_field_path("write_me")]
- expected_transform_merge = [_make_field_path("timestamp")]
- expected_merge = [_make_field_path("timestamp"), _make_field_path("write_me")]
- self.assertEqual(inst.data_merge, expected_data_merge)
- self.assertEqual(inst.transform_merge, expected_transform_merge)
- self.assertEqual(inst.merge, expected_merge)
- self.assertTrue(inst.has_updates)
-
- def test_apply_merge_list_fields_w_empty_document(self):
- document_data = {}
- inst = self._make_one(document_data)
-
- with self.assertRaises(ValueError):
- inst.apply_merge(["nonesuch", "or.this"])
-
- def test_apply_merge_list_fields_w_unmerged_delete(self):
- from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD
-
- document_data = {
- "write_me": "value",
- "delete_me": DELETE_FIELD,
- "ignore_me": 123,
- "unmerged_delete": DELETE_FIELD,
- }
- inst = self._make_one(document_data)
-
- with self.assertRaises(ValueError):
- inst.apply_merge(["write_me", "delete_me"])
-
- def test_apply_merge_list_fields_w_delete(self):
- from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD
-
- document_data = {
- "write_me": "value",
- "delete_me": DELETE_FIELD,
- "ignore_me": 123,
- }
- inst = self._make_one(document_data)
-
- inst.apply_merge(["write_me", "delete_me"])
-
- expected_set_fields = {"write_me": "value"}
- expected_deleted_fields = [_make_field_path("delete_me")]
- self.assertEqual(inst.set_fields, expected_set_fields)
- self.assertEqual(inst.deleted_fields, expected_deleted_fields)
- self.assertTrue(inst.has_updates)
-
- def test_apply_merge_list_fields_w_prefixes(self):
-
- document_data = {"a": {"b": {"c": 123}}}
- inst = self._make_one(document_data)
-
- with self.assertRaises(ValueError):
- inst.apply_merge(["a", "a.b"])
-
- def test_apply_merge_list_fields_w_missing_data_string_paths(self):
-
- document_data = {"write_me": "value", "ignore_me": 123}
- inst = self._make_one(document_data)
-
- with self.assertRaises(ValueError):
- inst.apply_merge(["write_me", "nonesuch"])
-
- def test_apply_merge_list_fields_w_non_merge_field(self):
-
- document_data = {"write_me": "value", "ignore_me": 123}
- inst = self._make_one(document_data)
-
- inst.apply_merge([_make_field_path("write_me")])
-
- expected_set_fields = {"write_me": "value"}
- self.assertEqual(inst.set_fields, expected_set_fields)
- self.assertTrue(inst.has_updates)
-
- def test_apply_merge_list_fields_w_server_timestamp(self):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_data = {
- "write_me": "value",
- "timestamp": SERVER_TIMESTAMP,
- "ignored_stamp": SERVER_TIMESTAMP,
- }
- inst = self._make_one(document_data)
-
- inst.apply_merge([_make_field_path("write_me"), _make_field_path("timestamp")])
-
- expected_data_merge = [_make_field_path("write_me")]
- expected_transform_merge = [_make_field_path("timestamp")]
- expected_merge = [_make_field_path("timestamp"), _make_field_path("write_me")]
- self.assertEqual(inst.data_merge, expected_data_merge)
- self.assertEqual(inst.transform_merge, expected_transform_merge)
- self.assertEqual(inst.merge, expected_merge)
- expected_server_timestamps = [_make_field_path("timestamp")]
- self.assertEqual(inst.server_timestamps, expected_server_timestamps)
- self.assertTrue(inst.has_updates)
-
- def test_apply_merge_list_fields_w_array_remove(self):
- from google.cloud.firestore_v1beta1.transforms import ArrayRemove
-
- values = [2, 4, 8]
- document_data = {
- "write_me": "value",
- "remove_me": ArrayRemove(values),
- "ignored_remove_me": ArrayRemove((1, 3, 5)),
- }
- inst = self._make_one(document_data)
-
- inst.apply_merge([_make_field_path("write_me"), _make_field_path("remove_me")])
-
- expected_data_merge = [_make_field_path("write_me")]
- expected_transform_merge = [_make_field_path("remove_me")]
- expected_merge = [_make_field_path("remove_me"), _make_field_path("write_me")]
- self.assertEqual(inst.data_merge, expected_data_merge)
- self.assertEqual(inst.transform_merge, expected_transform_merge)
- self.assertEqual(inst.merge, expected_merge)
- expected_array_removes = {_make_field_path("remove_me"): values}
- self.assertEqual(inst.array_removes, expected_array_removes)
- self.assertTrue(inst.has_updates)
-
- def test_apply_merge_list_fields_w_array_union(self):
- from google.cloud.firestore_v1beta1.transforms import ArrayUnion
-
- values = [1, 3, 5]
- document_data = {
- "write_me": "value",
- "union_me": ArrayUnion(values),
- "ignored_union_me": ArrayUnion((2, 4, 8)),
- }
- inst = self._make_one(document_data)
-
- inst.apply_merge([_make_field_path("write_me"), _make_field_path("union_me")])
-
- expected_data_merge = [_make_field_path("write_me")]
- expected_transform_merge = [_make_field_path("union_me")]
- expected_merge = [_make_field_path("union_me"), _make_field_path("write_me")]
- self.assertEqual(inst.data_merge, expected_data_merge)
- self.assertEqual(inst.transform_merge, expected_transform_merge)
- self.assertEqual(inst.merge, expected_merge)
- expected_array_unions = {_make_field_path("union_me"): values}
- self.assertEqual(inst.array_unions, expected_array_unions)
- self.assertTrue(inst.has_updates)
-
-
-class Test_pbs_for_set_with_merge(unittest.TestCase):
- @staticmethod
- def _call_fut(document_path, document_data, merge):
- from google.cloud.firestore_v1beta1 import _helpers
-
- return _helpers.pbs_for_set_with_merge(
- document_path, document_data, merge=merge
- )
-
- @staticmethod
- def _make_write_w_document(document_path, **data):
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1._helpers import encode_dict
-
- return write_pb2.Write(
- update=document_pb2.Document(name=document_path, fields=encode_dict(data))
- )
-
- @staticmethod
- def _make_write_w_transform(document_path, fields):
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1.gapic import enums
-
- server_val = enums.DocumentTransform.FieldTransform.ServerValue
- transforms = [
- write_pb2.DocumentTransform.FieldTransform(
- field_path=field, set_to_server_value=server_val.REQUEST_TIME
- )
- for field in fields
- ]
-
- return write_pb2.Write(
- transform=write_pb2.DocumentTransform(
- document=document_path, field_transforms=transforms
- )
- )
-
- @staticmethod
- def _update_document_mask(update_pb, field_paths):
- from google.cloud.firestore_v1beta1.proto import common_pb2
-
- update_pb.update_mask.CopyFrom(
- common_pb2.DocumentMask(field_paths=sorted(field_paths))
- )
-
- def test_with_merge_true_wo_transform(self):
- document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
- document_data = {"cheese": 1.5, "crackers": True}
-
- write_pbs = self._call_fut(document_path, document_data, merge=True)
-
- update_pb = self._make_write_w_document(document_path, **document_data)
- self._update_document_mask(update_pb, field_paths=sorted(document_data))
- expected_pbs = [update_pb]
- self.assertEqual(write_pbs, expected_pbs)
-
- def test_with_merge_field_wo_transform(self):
- document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
- document_data = {"cheese": 1.5, "crackers": True}
-
- write_pbs = self._call_fut(document_path, document_data, merge=["cheese"])
-
- update_pb = self._make_write_w_document(
- document_path, cheese=document_data["cheese"]
- )
- self._update_document_mask(update_pb, field_paths=["cheese"])
- expected_pbs = [update_pb]
- self.assertEqual(write_pbs, expected_pbs)
-
- def test_with_merge_true_w_transform(self):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
- update_data = {"cheese": 1.5, "crackers": True}
- document_data = update_data.copy()
- document_data["butter"] = SERVER_TIMESTAMP
-
- write_pbs = self._call_fut(document_path, document_data, merge=True)
-
- update_pb = self._make_write_w_document(document_path, **update_data)
- self._update_document_mask(update_pb, field_paths=sorted(update_data))
- transform_pb = self._make_write_w_transform(document_path, fields=["butter"])
- expected_pbs = [update_pb, transform_pb]
- self.assertEqual(write_pbs, expected_pbs)
-
- def test_with_merge_field_w_transform(self):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
- update_data = {"cheese": 1.5, "crackers": True}
- document_data = update_data.copy()
- document_data["butter"] = SERVER_TIMESTAMP
-
- write_pbs = self._call_fut(
- document_path, document_data, merge=["cheese", "butter"]
- )
-
- update_pb = self._make_write_w_document(
- document_path, cheese=document_data["cheese"]
- )
- self._update_document_mask(update_pb, ["cheese"])
- transform_pb = self._make_write_w_transform(document_path, fields=["butter"])
- expected_pbs = [update_pb, transform_pb]
- self.assertEqual(write_pbs, expected_pbs)
-
- def test_with_merge_field_w_transform_masking_simple(self):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
- update_data = {"cheese": 1.5, "crackers": True}
- document_data = update_data.copy()
- document_data["butter"] = {"pecan": SERVER_TIMESTAMP}
-
- write_pbs = self._call_fut(document_path, document_data, merge=["butter.pecan"])
-
- update_pb = self._make_write_w_document(document_path)
- transform_pb = self._make_write_w_transform(
- document_path, fields=["butter.pecan"]
- )
- expected_pbs = [update_pb, transform_pb]
- self.assertEqual(write_pbs, expected_pbs)
-
- def test_with_merge_field_w_transform_parent(self):
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
-
- document_path = _make_ref_string(u"little", u"town", u"of", u"ham")
- update_data = {"cheese": 1.5, "crackers": True}
- document_data = update_data.copy()
- document_data["butter"] = {"popcorn": "yum", "pecan": SERVER_TIMESTAMP}
-
- write_pbs = self._call_fut(
- document_path, document_data, merge=["cheese", "butter"]
- )
-
- update_pb = self._make_write_w_document(
- document_path, cheese=update_data["cheese"], butter={"popcorn": "yum"}
- )
- self._update_document_mask(update_pb, ["cheese", "butter"])
- transform_pb = self._make_write_w_transform(
- document_path, fields=["butter.pecan"]
- )
- expected_pbs = [update_pb, transform_pb]
- self.assertEqual(write_pbs, expected_pbs)
-
-
-class TestDocumentExtractorForUpdate(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1 import _helpers
-
- return _helpers.DocumentExtractorForUpdate
-
- def _make_one(self, document_data):
- return self._get_target_class()(document_data)
-
- def test_ctor_w_empty_document(self):
- document_data = {}
-
- inst = self._make_one(document_data)
- self.assertEqual(inst.top_level_paths, [])
-
- def test_ctor_w_simple_keys(self):
- document_data = {"a": 1, "b": 2, "c": 3}
-
- expected_paths = [
- _make_field_path("a"),
- _make_field_path("b"),
- _make_field_path("c"),
- ]
- inst = self._make_one(document_data)
- self.assertEqual(inst.top_level_paths, expected_paths)
-
- def test_ctor_w_nested_keys(self):
- document_data = {"a": {"d": {"e": 1}}, "b": {"f": 7}, "c": 3}
-
- expected_paths = [
- _make_field_path("a"),
- _make_field_path("b"),
- _make_field_path("c"),
- ]
- inst = self._make_one(document_data)
- self.assertEqual(inst.top_level_paths, expected_paths)
-
- def test_ctor_w_dotted_keys(self):
- document_data = {"a.d.e": 1, "b.f": 7, "c": 3}
-
- expected_paths = [
- _make_field_path("a", "d", "e"),
- _make_field_path("b", "f"),
- _make_field_path("c"),
- ]
- inst = self._make_one(document_data)
- self.assertEqual(inst.top_level_paths, expected_paths)
-
- def test_ctor_w_nested_dotted_keys(self):
- document_data = {"a.d.e": 1, "b.f": {"h.i": 9}, "c": 3}
-
- expected_paths = [
- _make_field_path("a", "d", "e"),
- _make_field_path("b", "f"),
- _make_field_path("c"),
- ]
- expected_set_fields = {"a": {"d": {"e": 1}}, "b": {"f": {"h.i": 9}}, "c": 3}
- inst = self._make_one(document_data)
- self.assertEqual(inst.top_level_paths, expected_paths)
- self.assertEqual(inst.set_fields, expected_set_fields)
-
-
-class Test_pbs_for_update(unittest.TestCase):
- @staticmethod
- def _call_fut(document_path, field_updates, option):
- from google.cloud.firestore_v1beta1._helpers import pbs_for_update
-
- return pbs_for_update(document_path, field_updates, option)
-
- def _helper(self, option=None, do_transform=False, **write_kwargs):
- from google.cloud.firestore_v1beta1 import _helpers
- from google.cloud.firestore_v1beta1.field_path import FieldPath
- from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP
- from google.cloud.firestore_v1beta1.gapic import enums
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- document_path = _make_ref_string(u"toy", u"car", u"onion", u"garlic")
- field_path1 = "bitez.yum"
- value = b"\x00\x01"
- field_path2 = "blog.internet"
-
- field_updates = {field_path1: value}
- if do_transform:
- field_updates[field_path2] = SERVER_TIMESTAMP
-
- write_pbs = self._call_fut(document_path, field_updates, option)
-
- map_pb = document_pb2.MapValue(fields={"yum": _value_pb(bytes_value=value)})
-
- field_paths = [field_path1]
-
- expected_update_pb = write_pb2.Write(
- update=document_pb2.Document(
- name=document_path, fields={"bitez": _value_pb(map_value=map_pb)}
- ),
- update_mask=common_pb2.DocumentMask(field_paths=field_paths),
- **write_kwargs
- )
- if isinstance(option, _helpers.ExistsOption):
- precondition = common_pb2.Precondition(exists=False)
- expected_update_pb.current_document.CopyFrom(precondition)
- expected_pbs = [expected_update_pb]
- if do_transform:
- transform_paths = FieldPath.from_string(field_path2)
- server_val = enums.DocumentTransform.FieldTransform.ServerValue
- expected_transform_pb = write_pb2.Write(
- transform=write_pb2.DocumentTransform(
- document=document_path,
- field_transforms=[
- write_pb2.DocumentTransform.FieldTransform(
- field_path=transform_paths.to_api_repr(),
- set_to_server_value=server_val.REQUEST_TIME,
- )
- ],
- )
- )
- expected_pbs.append(expected_transform_pb)
- self.assertEqual(write_pbs, expected_pbs)
-
- def test_without_option(self):
- from google.cloud.firestore_v1beta1.proto import common_pb2
-
- precondition = common_pb2.Precondition(exists=True)
- self._helper(current_document=precondition)
-
- def test_with_exists_option(self):
- from google.cloud.firestore_v1beta1.client import _helpers
-
- option = _helpers.ExistsOption(False)
- self._helper(option=option)
-
- def test_update_and_transform(self):
- from google.cloud.firestore_v1beta1.proto import common_pb2
-
- precondition = common_pb2.Precondition(exists=True)
- self._helper(current_document=precondition, do_transform=True)
-
-
-class Test_pb_for_delete(unittest.TestCase):
- @staticmethod
- def _call_fut(document_path, option):
- from google.cloud.firestore_v1beta1._helpers import pb_for_delete
-
- return pb_for_delete(document_path, option)
-
- def _helper(self, option=None, **write_kwargs):
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- document_path = _make_ref_string(u"chicken", u"philly", u"one", u"two")
- write_pb = self._call_fut(document_path, option)
-
- expected_pb = write_pb2.Write(delete=document_path, **write_kwargs)
- self.assertEqual(write_pb, expected_pb)
-
- def test_without_option(self):
- self._helper()
-
- def test_with_option(self):
- from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1 import _helpers
-
- update_time = timestamp_pb2.Timestamp(seconds=1309700594, nanos=822211297)
- option = _helpers.LastUpdateOption(update_time)
- precondition = common_pb2.Precondition(update_time=update_time)
- self._helper(option=option, current_document=precondition)
-
-
-class Test_get_transaction_id(unittest.TestCase):
- @staticmethod
- def _call_fut(transaction, **kwargs):
- from google.cloud.firestore_v1beta1._helpers import get_transaction_id
-
- return get_transaction_id(transaction, **kwargs)
-
- def test_no_transaction(self):
- ret_val = self._call_fut(None)
- self.assertIsNone(ret_val)
-
- def test_invalid_transaction(self):
- from google.cloud.firestore_v1beta1.transaction import Transaction
-
- transaction = Transaction(mock.sentinel.client)
- self.assertFalse(transaction.in_progress)
- with self.assertRaises(ValueError):
- self._call_fut(transaction)
-
- def test_after_writes_not_allowed(self):
- from google.cloud.firestore_v1beta1._helpers import ReadAfterWriteError
- from google.cloud.firestore_v1beta1.transaction import Transaction
-
- transaction = Transaction(mock.sentinel.client)
- transaction._id = b"under-hook"
- transaction._write_pbs.append(mock.sentinel.write)
-
- with self.assertRaises(ReadAfterWriteError):
- self._call_fut(transaction)
-
- def test_after_writes_allowed(self):
- from google.cloud.firestore_v1beta1.transaction import Transaction
-
- transaction = Transaction(mock.sentinel.client)
- txn_id = b"we-are-0fine"
- transaction._id = txn_id
- transaction._write_pbs.append(mock.sentinel.write)
-
- ret_val = self._call_fut(transaction, read_operation=False)
- self.assertEqual(ret_val, txn_id)
-
- def test_good_transaction(self):
- from google.cloud.firestore_v1beta1.transaction import Transaction
-
- transaction = Transaction(mock.sentinel.client)
- txn_id = b"doubt-it"
- transaction._id = txn_id
- self.assertTrue(transaction.in_progress)
-
- self.assertEqual(self._call_fut(transaction), txn_id)
-
-
-class Test_metadata_with_prefix(unittest.TestCase):
- @staticmethod
- def _call_fut(database_string):
- from google.cloud.firestore_v1beta1._helpers import metadata_with_prefix
-
- return metadata_with_prefix(database_string)
-
- def test_it(self):
- database_string = u"projects/prahj/databases/dee-bee"
- metadata = self._call_fut(database_string)
-
- self.assertEqual(metadata, [("google-cloud-resource-prefix", database_string)])
-
-
-class TestWriteOption(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1._helpers import WriteOption
-
- return WriteOption
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- def test_modify_write(self):
- option = self._make_one()
- with self.assertRaises(NotImplementedError):
- option.modify_write(None)
-
-
-class TestLastUpdateOption(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1._helpers import LastUpdateOption
-
- return LastUpdateOption
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- def test_constructor(self):
- option = self._make_one(mock.sentinel.timestamp)
- self.assertIs(option._last_update_time, mock.sentinel.timestamp)
-
- def test___eq___different_type(self):
- option = self._make_one(mock.sentinel.timestamp)
- other = object()
- self.assertFalse(option == other)
-
- def test___eq___different_timestamp(self):
- option = self._make_one(mock.sentinel.timestamp)
- other = self._make_one(mock.sentinel.other_timestamp)
- self.assertFalse(option == other)
-
- def test___eq___same_timestamp(self):
- option = self._make_one(mock.sentinel.timestamp)
- other = self._make_one(mock.sentinel.timestamp)
- self.assertTrue(option == other)
-
- def test_modify_write_update_time(self):
- from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- timestamp_pb = timestamp_pb2.Timestamp(seconds=683893592, nanos=229362000)
- option = self._make_one(timestamp_pb)
- write_pb = write_pb2.Write()
- ret_val = option.modify_write(write_pb)
-
- self.assertIsNone(ret_val)
- expected_doc = common_pb2.Precondition(update_time=timestamp_pb)
- self.assertEqual(write_pb.current_document, expected_doc)
-
-
-class TestExistsOption(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1._helpers import ExistsOption
-
- return ExistsOption
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- def test_constructor(self):
- option = self._make_one(mock.sentinel.totes_bool)
- self.assertIs(option._exists, mock.sentinel.totes_bool)
-
- def test___eq___different_type(self):
- option = self._make_one(mock.sentinel.timestamp)
- other = object()
- self.assertFalse(option == other)
-
- def test___eq___different_exists(self):
- option = self._make_one(True)
- other = self._make_one(False)
- self.assertFalse(option == other)
-
- def test___eq___same_exists(self):
- option = self._make_one(True)
- other = self._make_one(True)
- self.assertTrue(option == other)
-
- def test_modify_write(self):
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- for exists in (True, False):
- option = self._make_one(exists)
- write_pb = write_pb2.Write()
- ret_val = option.modify_write(write_pb)
-
- self.assertIsNone(ret_val)
- expected_doc = common_pb2.Precondition(exists=exists)
- self.assertEqual(write_pb.current_document, expected_doc)
-
-
-def _value_pb(**kwargs):
- from google.cloud.firestore_v1beta1.proto.document_pb2 import Value
-
- return Value(**kwargs)
-
-
-def _make_ref_string(project, database, *path):
- from google.cloud.firestore_v1beta1 import _helpers
-
- doc_rel_path = _helpers.DOCUMENT_PATH_DELIMITER.join(path)
- return u"projects/{}/databases/{}/documents/{}".format(
- project, database, doc_rel_path
- )
-
-
-def _make_credentials():
- import google.auth.credentials
-
- return mock.Mock(spec=google.auth.credentials.Credentials)
-
-
-def _make_client(project="quark"):
- from google.cloud.firestore_v1beta1.client import Client
-
- credentials = _make_credentials()
-
- with pytest.deprecated_call():
- return Client(project=project, credentials=credentials)
-
-
-def _make_field_path(*fields):
- from google.cloud.firestore_v1beta1 import field_path
-
- return field_path.FieldPath(*fields)
diff --git a/tests/unit/v1beta1/test_batch.py b/tests/unit/v1beta1/test_batch.py
deleted file mode 100644
index 8314247515..0000000000
--- a/tests/unit/v1beta1/test_batch.py
+++ /dev/null
@@ -1,274 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-import pytest
-
-
-class TestWriteBatch(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1.batch import WriteBatch
-
- return WriteBatch
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- def test_constructor(self):
- batch = self._make_one(mock.sentinel.client)
- self.assertIs(batch._client, mock.sentinel.client)
- self.assertEqual(batch._write_pbs, [])
- self.assertIsNone(batch.write_results)
- self.assertIsNone(batch.commit_time)
-
- def test__add_write_pbs(self):
- batch = self._make_one(mock.sentinel.client)
- self.assertEqual(batch._write_pbs, [])
- batch._add_write_pbs([mock.sentinel.write1, mock.sentinel.write2])
- self.assertEqual(batch._write_pbs, [mock.sentinel.write1, mock.sentinel.write2])
-
- def test_create(self):
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- client = _make_client()
- batch = self._make_one(client)
- self.assertEqual(batch._write_pbs, [])
-
- reference = client.document("this", "one")
- document_data = {"a": 10, "b": 2.5}
- ret_val = batch.create(reference, document_data)
- self.assertIsNone(ret_val)
- new_write_pb = write_pb2.Write(
- update=document_pb2.Document(
- name=reference._document_path,
- fields={
- "a": _value_pb(integer_value=document_data["a"]),
- "b": _value_pb(double_value=document_data["b"]),
- },
- ),
- current_document=common_pb2.Precondition(exists=False),
- )
- self.assertEqual(batch._write_pbs, [new_write_pb])
-
- def test_set(self):
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- client = _make_client()
- batch = self._make_one(client)
- self.assertEqual(batch._write_pbs, [])
-
- reference = client.document("another", "one")
- field = "zapzap"
- value = u"meadows and flowers"
- document_data = {field: value}
- ret_val = batch.set(reference, document_data)
- self.assertIsNone(ret_val)
- new_write_pb = write_pb2.Write(
- update=document_pb2.Document(
- name=reference._document_path,
- fields={field: _value_pb(string_value=value)},
- )
- )
- self.assertEqual(batch._write_pbs, [new_write_pb])
-
- def test_set_merge(self):
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- client = _make_client()
- batch = self._make_one(client)
- self.assertEqual(batch._write_pbs, [])
-
- reference = client.document("another", "one")
- field = "zapzap"
- value = u"meadows and flowers"
- document_data = {field: value}
- ret_val = batch.set(reference, document_data, merge=True)
- self.assertIsNone(ret_val)
- new_write_pb = write_pb2.Write(
- update=document_pb2.Document(
- name=reference._document_path,
- fields={field: _value_pb(string_value=value)},
- ),
- update_mask={"field_paths": [field]},
- )
- self.assertEqual(batch._write_pbs, [new_write_pb])
-
- def test_update(self):
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- client = _make_client()
- batch = self._make_one(client)
- self.assertEqual(batch._write_pbs, [])
-
- reference = client.document("cats", "cradle")
- field_path = "head.foot"
- value = u"knees toes shoulders"
- field_updates = {field_path: value}
-
- ret_val = batch.update(reference, field_updates)
- self.assertIsNone(ret_val)
-
- map_pb = document_pb2.MapValue(fields={"foot": _value_pb(string_value=value)})
- new_write_pb = write_pb2.Write(
- update=document_pb2.Document(
- name=reference._document_path,
- fields={"head": _value_pb(map_value=map_pb)},
- ),
- update_mask=common_pb2.DocumentMask(field_paths=[field_path]),
- current_document=common_pb2.Precondition(exists=True),
- )
- self.assertEqual(batch._write_pbs, [new_write_pb])
-
- def test_delete(self):
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- client = _make_client()
- batch = self._make_one(client)
- self.assertEqual(batch._write_pbs, [])
-
- reference = client.document("early", "mornin", "dawn", "now")
- ret_val = batch.delete(reference)
- self.assertIsNone(ret_val)
- new_write_pb = write_pb2.Write(delete=reference._document_path)
- self.assertEqual(batch._write_pbs, [new_write_pb])
-
- def test_commit(self):
- from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1beta1.proto import firestore_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- # Create a minimal fake GAPIC with a dummy result.
- firestore_api = mock.Mock(spec=["commit"])
- timestamp = timestamp_pb2.Timestamp(seconds=1234567, nanos=123456798)
- commit_response = firestore_pb2.CommitResponse(
- write_results=[write_pb2.WriteResult(), write_pb2.WriteResult()],
- commit_time=timestamp,
- )
- firestore_api.commit.return_value = commit_response
-
- # Attach the fake GAPIC to a real client.
- client = _make_client("grand")
- client._firestore_api_internal = firestore_api
-
- # Actually make a batch with some mutations and call commit().
- batch = self._make_one(client)
- document1 = client.document("a", "b")
- batch.create(document1, {"ten": 10, "buck": u"ets"})
- document2 = client.document("c", "d", "e", "f")
- batch.delete(document2)
- write_pbs = batch._write_pbs[::]
-
- write_results = batch.commit()
- self.assertEqual(write_results, list(commit_response.write_results))
- self.assertEqual(batch.write_results, write_results)
- self.assertEqual(batch.commit_time, timestamp)
- # Make sure batch has no more "changes".
- self.assertEqual(batch._write_pbs, [])
-
- # Verify the mocks.
- firestore_api.commit.assert_called_once_with(
- client._database_string,
- write_pbs,
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_as_context_mgr_wo_error(self):
- from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1beta1.proto import firestore_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- firestore_api = mock.Mock(spec=["commit"])
- timestamp = timestamp_pb2.Timestamp(seconds=1234567, nanos=123456798)
- commit_response = firestore_pb2.CommitResponse(
- write_results=[write_pb2.WriteResult(), write_pb2.WriteResult()],
- commit_time=timestamp,
- )
- firestore_api.commit.return_value = commit_response
- client = _make_client()
- client._firestore_api_internal = firestore_api
- batch = self._make_one(client)
- document1 = client.document("a", "b")
- document2 = client.document("c", "d", "e", "f")
-
- with batch as ctx_mgr:
- self.assertIs(ctx_mgr, batch)
- ctx_mgr.create(document1, {"ten": 10, "buck": u"ets"})
- ctx_mgr.delete(document2)
- write_pbs = batch._write_pbs[::]
-
- self.assertEqual(batch.write_results, list(commit_response.write_results))
- self.assertEqual(batch.commit_time, timestamp)
- # Make sure batch has no more "changes".
- self.assertEqual(batch._write_pbs, [])
-
- # Verify the mocks.
- firestore_api.commit.assert_called_once_with(
- client._database_string,
- write_pbs,
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_as_context_mgr_w_error(self):
- firestore_api = mock.Mock(spec=["commit"])
- client = _make_client()
- client._firestore_api_internal = firestore_api
- batch = self._make_one(client)
- document1 = client.document("a", "b")
- document2 = client.document("c", "d", "e", "f")
-
- with self.assertRaises(RuntimeError):
- with batch as ctx_mgr:
- ctx_mgr.create(document1, {"ten": 10, "buck": u"ets"})
- ctx_mgr.delete(document2)
- raise RuntimeError("testing")
-
- self.assertIsNone(batch.write_results)
- self.assertIsNone(batch.commit_time)
- # batch still has its changes
- self.assertEqual(len(batch._write_pbs), 2)
-
- firestore_api.commit.assert_not_called()
-
-
-def _value_pb(**kwargs):
- from google.cloud.firestore_v1beta1.proto.document_pb2 import Value
-
- return Value(**kwargs)
-
-
-def _make_credentials():
- import google.auth.credentials
-
- return mock.Mock(spec=google.auth.credentials.Credentials)
-
-
-def _make_client(project="seventy-nine"):
- from google.cloud.firestore_v1beta1.client import Client
-
- credentials = _make_credentials()
-
- with pytest.deprecated_call():
- return Client(project=project, credentials=credentials)
diff --git a/tests/unit/v1beta1/test_client.py b/tests/unit/v1beta1/test_client.py
deleted file mode 100644
index 4aa5a36efb..0000000000
--- a/tests/unit/v1beta1/test_client.py
+++ /dev/null
@@ -1,667 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import types
-import unittest
-
-import mock
-import pytest
-
-
-class TestClient(unittest.TestCase):
-
- PROJECT = "my-prahjekt"
-
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1.client import Client
-
- return Client
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- def _make_default_one(self):
- credentials = _make_credentials()
- return self._make_one(project=self.PROJECT, credentials=credentials)
-
- def test_constructor(self):
- from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE
-
- credentials = _make_credentials()
-
- with pytest.deprecated_call():
- client = self._make_one(project=self.PROJECT, credentials=credentials)
-
- self.assertEqual(client.project, self.PROJECT)
- self.assertEqual(client._credentials, credentials)
- self.assertEqual(client._database, DEFAULT_DATABASE)
-
- def test_constructor_explicit(self):
- credentials = _make_credentials()
- database = "now-db"
-
- with pytest.deprecated_call():
- client = self._make_one(
- project=self.PROJECT, credentials=credentials, database=database
- )
-
- self.assertEqual(client.project, self.PROJECT)
- self.assertEqual(client._credentials, credentials)
- self.assertEqual(client._database, database)
-
- @mock.patch(
- "google.cloud.firestore_v1beta1.gapic.firestore_client." "FirestoreClient",
- autospec=True,
- return_value=mock.sentinel.firestore_api,
- )
- def test__firestore_api_property(self, mock_client):
- mock_client.SERVICE_ADDRESS = "endpoint"
-
- with pytest.deprecated_call():
- client = self._make_default_one()
-
- self.assertIsNone(client._firestore_api_internal)
- firestore_api = client._firestore_api
- self.assertIs(firestore_api, mock_client.return_value)
- self.assertIs(firestore_api, client._firestore_api_internal)
- mock_client.assert_called_once_with(transport=client._transport)
-
- # Call again to show that it is cached, but call count is still 1.
- self.assertIs(client._firestore_api, mock_client.return_value)
- self.assertEqual(mock_client.call_count, 1)
-
- def test___database_string_property(self):
- credentials = _make_credentials()
- database = "cheeeeez"
-
- with pytest.deprecated_call():
- client = self._make_one(
- project=self.PROJECT, credentials=credentials, database=database
- )
-
- self.assertIsNone(client._database_string_internal)
- database_string = client._database_string
- expected = "projects/{}/databases/{}".format(client.project, client._database)
- self.assertEqual(database_string, expected)
- self.assertIs(database_string, client._database_string_internal)
-
- # Swap it out with a unique value to verify it is cached.
- client._database_string_internal = mock.sentinel.cached
- self.assertIs(client._database_string, mock.sentinel.cached)
-
- def test___rpc_metadata_property(self):
- credentials = _make_credentials()
- database = "quanta"
-
- with pytest.deprecated_call():
- client = self._make_one(
- project=self.PROJECT, credentials=credentials, database=database
- )
-
- self.assertEqual(
- client._rpc_metadata,
- [("google-cloud-resource-prefix", client._database_string)],
- )
-
- def test_collection_factory(self):
- from google.cloud.firestore_v1beta1.collection import CollectionReference
-
- collection_id = "users"
-
- with pytest.deprecated_call():
- client = self._make_default_one()
-
- collection = client.collection(collection_id)
-
- self.assertEqual(collection._path, (collection_id,))
- self.assertIs(collection._client, client)
- self.assertIsInstance(collection, CollectionReference)
-
- def test_collection_factory_nested(self):
- from google.cloud.firestore_v1beta1.collection import CollectionReference
-
- with pytest.deprecated_call():
- client = self._make_default_one()
-
- parts = ("users", "alovelace", "beep")
- collection_path = "/".join(parts)
- collection1 = client.collection(collection_path)
-
- self.assertEqual(collection1._path, parts)
- self.assertIs(collection1._client, client)
- self.assertIsInstance(collection1, CollectionReference)
-
- # Make sure using segments gives the same result.
- collection2 = client.collection(*parts)
- self.assertEqual(collection2._path, parts)
- self.assertIs(collection2._client, client)
- self.assertIsInstance(collection2, CollectionReference)
-
- def test_document_factory(self):
- from google.cloud.firestore_v1beta1.document import DocumentReference
-
- parts = ("rooms", "roomA")
-
- with pytest.deprecated_call():
- client = self._make_default_one()
-
- doc_path = "/".join(parts)
- document1 = client.document(doc_path)
-
- self.assertEqual(document1._path, parts)
- self.assertIs(document1._client, client)
- self.assertIsInstance(document1, DocumentReference)
-
- # Make sure using segments gives the same result.
- document2 = client.document(*parts)
- self.assertEqual(document2._path, parts)
- self.assertIs(document2._client, client)
- self.assertIsInstance(document2, DocumentReference)
-
- def test_document_factory_nested(self):
- from google.cloud.firestore_v1beta1.document import DocumentReference
-
- with pytest.deprecated_call():
- client = self._make_default_one()
-
- parts = ("rooms", "roomA", "shoes", "dressy")
- doc_path = "/".join(parts)
- document1 = client.document(doc_path)
-
- self.assertEqual(document1._path, parts)
- self.assertIs(document1._client, client)
- self.assertIsInstance(document1, DocumentReference)
-
- # Make sure using segments gives the same result.
- document2 = client.document(*parts)
- self.assertEqual(document2._path, parts)
- self.assertIs(document2._client, client)
- self.assertIsInstance(document2, DocumentReference)
-
- def test_field_path(self):
- klass = self._get_target_class()
- self.assertEqual(klass.field_path("a", "b", "c"), "a.b.c")
-
- def test_write_option_last_update(self):
- from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1beta1._helpers import LastUpdateOption
-
- timestamp = timestamp_pb2.Timestamp(seconds=1299767599, nanos=811111097)
-
- klass = self._get_target_class()
- option = klass.write_option(last_update_time=timestamp)
- self.assertIsInstance(option, LastUpdateOption)
- self.assertEqual(option._last_update_time, timestamp)
-
- def test_write_option_exists(self):
- from google.cloud.firestore_v1beta1._helpers import ExistsOption
-
- klass = self._get_target_class()
-
- option1 = klass.write_option(exists=False)
- self.assertIsInstance(option1, ExistsOption)
- self.assertFalse(option1._exists)
-
- option2 = klass.write_option(exists=True)
- self.assertIsInstance(option2, ExistsOption)
- self.assertTrue(option2._exists)
-
- def test_write_open_neither_arg(self):
- from google.cloud.firestore_v1beta1.client import _BAD_OPTION_ERR
-
- klass = self._get_target_class()
- with self.assertRaises(TypeError) as exc_info:
- klass.write_option()
-
- self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,))
-
- def test_write_multiple_args(self):
- from google.cloud.firestore_v1beta1.client import _BAD_OPTION_ERR
-
- klass = self._get_target_class()
- with self.assertRaises(TypeError) as exc_info:
- klass.write_option(exists=False, last_update_time=mock.sentinel.timestamp)
-
- self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,))
-
- def test_write_bad_arg(self):
- from google.cloud.firestore_v1beta1.client import _BAD_OPTION_ERR
-
- klass = self._get_target_class()
- with self.assertRaises(TypeError) as exc_info:
- klass.write_option(spinach="popeye")
-
- extra = "{!r} was provided".format("spinach")
- self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR, extra))
-
- def test_collections(self):
- from google.api_core.page_iterator import Iterator
- from google.api_core.page_iterator import Page
- from google.cloud.firestore_v1beta1.collection import CollectionReference
-
- collection_ids = ["users", "projects"]
-
- with pytest.deprecated_call():
- client = self._make_default_one()
-
- firestore_api = mock.Mock(spec=["list_collection_ids"])
- client._firestore_api_internal = firestore_api
-
- class _Iterator(Iterator):
- def __init__(self, pages):
- super(_Iterator, self).__init__(client=None)
- self._pages = pages
-
- def _next_page(self):
- if self._pages:
- page, self._pages = self._pages[0], self._pages[1:]
- return Page(self, page, self.item_to_value)
-
- iterator = _Iterator(pages=[collection_ids])
- firestore_api.list_collection_ids.return_value = iterator
-
- collections = list(client.collections())
-
- self.assertEqual(len(collections), len(collection_ids))
- for collection, collection_id in zip(collections, collection_ids):
- self.assertIsInstance(collection, CollectionReference)
- self.assertEqual(collection.parent, None)
- self.assertEqual(collection.id, collection_id)
-
- firestore_api.list_collection_ids.assert_called_once_with(
- client._database_string, metadata=client._rpc_metadata
- )
-
- def _get_all_helper(self, client, references, document_pbs, **kwargs):
- # Create a minimal fake GAPIC with a dummy response.
- firestore_api = mock.Mock(spec=["batch_get_documents"])
- response_iterator = iter(document_pbs)
- firestore_api.batch_get_documents.return_value = response_iterator
-
- # Attach the fake GAPIC to a real client.
- client._firestore_api_internal = firestore_api
-
- # Actually call get_all().
- snapshots = client.get_all(references, **kwargs)
- self.assertIsInstance(snapshots, types.GeneratorType)
-
- return list(snapshots)
-
- def _info_for_get_all(self, data1, data2):
-
- with pytest.deprecated_call():
- client = self._make_default_one()
-
- document1 = client.document("pineapple", "lamp1")
- document2 = client.document("pineapple", "lamp2")
-
- # Make response protobufs.
- document_pb1, read_time = _doc_get_info(document1._document_path, data1)
- response1 = _make_batch_response(found=document_pb1, read_time=read_time)
-
- document_pb2, read_time = _doc_get_info(document2._document_path, data2)
- response2 = _make_batch_response(found=document_pb2, read_time=read_time)
-
- return client, document1, document2, response1, response2
-
- def test_get_all(self):
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.document import DocumentSnapshot
-
- data1 = {"a": u"cheese"}
- data2 = {"b": True, "c": 18}
- info = self._info_for_get_all(data1, data2)
- client, document1, document2, response1, response2 = info
-
- # Exercise the mocked ``batch_get_documents``.
- field_paths = ["a", "b"]
- snapshots = self._get_all_helper(
- client,
- [document1, document2],
- [response1, response2],
- field_paths=field_paths,
- )
- self.assertEqual(len(snapshots), 2)
-
- snapshot1 = snapshots[0]
- self.assertIsInstance(snapshot1, DocumentSnapshot)
- self.assertIs(snapshot1._reference, document1)
- self.assertEqual(snapshot1._data, data1)
-
- snapshot2 = snapshots[1]
- self.assertIsInstance(snapshot2, DocumentSnapshot)
- self.assertIs(snapshot2._reference, document2)
- self.assertEqual(snapshot2._data, data2)
-
- # Verify the call to the mock.
- doc_paths = [document1._document_path, document2._document_path]
- mask = common_pb2.DocumentMask(field_paths=field_paths)
- client._firestore_api.batch_get_documents.assert_called_once_with(
- client._database_string,
- doc_paths,
- mask,
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_get_all_with_transaction(self):
- from google.cloud.firestore_v1beta1.document import DocumentSnapshot
-
- data = {"so-much": 484}
- info = self._info_for_get_all(data, {})
- client, document, _, response, _ = info
- transaction = client.transaction()
- txn_id = b"the-man-is-non-stop"
- transaction._id = txn_id
-
- # Exercise the mocked ``batch_get_documents``.
- snapshots = self._get_all_helper(
- client, [document], [response], transaction=transaction
- )
- self.assertEqual(len(snapshots), 1)
-
- snapshot = snapshots[0]
- self.assertIsInstance(snapshot, DocumentSnapshot)
- self.assertIs(snapshot._reference, document)
- self.assertEqual(snapshot._data, data)
-
- # Verify the call to the mock.
- doc_paths = [document._document_path]
- client._firestore_api.batch_get_documents.assert_called_once_with(
- client._database_string,
- doc_paths,
- None,
- transaction=txn_id,
- metadata=client._rpc_metadata,
- )
-
- def test_get_all_unknown_result(self):
- from google.cloud.firestore_v1beta1.client import _BAD_DOC_TEMPLATE
-
- info = self._info_for_get_all({"z": 28.5}, {})
- client, document, _, _, response = info
-
- # Exercise the mocked ``batch_get_documents``.
- with self.assertRaises(ValueError) as exc_info:
- self._get_all_helper(client, [document], [response])
-
- err_msg = _BAD_DOC_TEMPLATE.format(response.found.name)
- self.assertEqual(exc_info.exception.args, (err_msg,))
-
- # Verify the call to the mock.
- doc_paths = [document._document_path]
- client._firestore_api.batch_get_documents.assert_called_once_with(
- client._database_string,
- doc_paths,
- None,
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_get_all_wrong_order(self):
- from google.cloud.firestore_v1beta1.document import DocumentSnapshot
-
- data1 = {"up": 10}
- data2 = {"down": -10}
- info = self._info_for_get_all(data1, data2)
- client, document1, document2, response1, response2 = info
- document3 = client.document("pineapple", "lamp3")
- response3 = _make_batch_response(missing=document3._document_path)
-
- # Exercise the mocked ``batch_get_documents``.
- snapshots = self._get_all_helper(
- client, [document1, document2, document3], [response2, response1, response3]
- )
-
- self.assertEqual(len(snapshots), 3)
-
- snapshot1 = snapshots[0]
- self.assertIsInstance(snapshot1, DocumentSnapshot)
- self.assertIs(snapshot1._reference, document2)
- self.assertEqual(snapshot1._data, data2)
-
- snapshot2 = snapshots[1]
- self.assertIsInstance(snapshot2, DocumentSnapshot)
- self.assertIs(snapshot2._reference, document1)
- self.assertEqual(snapshot2._data, data1)
-
- self.assertFalse(snapshots[2].exists)
-
- # Verify the call to the mock.
- doc_paths = [
- document1._document_path,
- document2._document_path,
- document3._document_path,
- ]
- client._firestore_api.batch_get_documents.assert_called_once_with(
- client._database_string,
- doc_paths,
- None,
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_batch(self):
- from google.cloud.firestore_v1beta1.batch import WriteBatch
-
- with pytest.deprecated_call():
- client = self._make_default_one()
-
- batch = client.batch()
- self.assertIsInstance(batch, WriteBatch)
- self.assertIs(batch._client, client)
- self.assertEqual(batch._write_pbs, [])
-
- def test_transaction(self):
- from google.cloud.firestore_v1beta1.transaction import Transaction
-
- with pytest.deprecated_call():
- client = self._make_default_one()
-
- transaction = client.transaction(max_attempts=3, read_only=True)
- self.assertIsInstance(transaction, Transaction)
- self.assertEqual(transaction._write_pbs, [])
- self.assertEqual(transaction._max_attempts, 3)
- self.assertTrue(transaction._read_only)
- self.assertIsNone(transaction._id)
-
-
-class Test__reference_info(unittest.TestCase):
- @staticmethod
- def _call_fut(references):
- from google.cloud.firestore_v1beta1.client import _reference_info
-
- return _reference_info(references)
-
- def test_it(self):
- from google.cloud.firestore_v1beta1.client import Client
-
- credentials = _make_credentials()
-
- with pytest.deprecated_call():
- client = Client(project="hi-projject", credentials=credentials)
-
- reference1 = client.document("a", "b")
- reference2 = client.document("a", "b", "c", "d")
- reference3 = client.document("a", "b")
- reference4 = client.document("f", "g")
-
- doc_path1 = reference1._document_path
- doc_path2 = reference2._document_path
- doc_path3 = reference3._document_path
- doc_path4 = reference4._document_path
- self.assertEqual(doc_path1, doc_path3)
-
- document_paths, reference_map = self._call_fut(
- [reference1, reference2, reference3, reference4]
- )
- self.assertEqual(document_paths, [doc_path1, doc_path2, doc_path3, doc_path4])
- # reference3 over-rides reference1.
- expected_map = {
- doc_path2: reference2,
- doc_path3: reference3,
- doc_path4: reference4,
- }
- self.assertEqual(reference_map, expected_map)
-
-
-class Test__get_reference(unittest.TestCase):
- @staticmethod
- def _call_fut(document_path, reference_map):
- from google.cloud.firestore_v1beta1.client import _get_reference
-
- return _get_reference(document_path, reference_map)
-
- def test_success(self):
- doc_path = "a/b/c"
- reference_map = {doc_path: mock.sentinel.reference}
- self.assertIs(self._call_fut(doc_path, reference_map), mock.sentinel.reference)
-
- def test_failure(self):
- from google.cloud.firestore_v1beta1.client import _BAD_DOC_TEMPLATE
-
- doc_path = "1/888/call-now"
- with self.assertRaises(ValueError) as exc_info:
- self._call_fut(doc_path, {})
-
- err_msg = _BAD_DOC_TEMPLATE.format(doc_path)
- self.assertEqual(exc_info.exception.args, (err_msg,))
-
-
-class Test__parse_batch_get(unittest.TestCase):
- @staticmethod
- def _call_fut(get_doc_response, reference_map, client=mock.sentinel.client):
- from google.cloud.firestore_v1beta1.client import _parse_batch_get
-
- return _parse_batch_get(get_doc_response, reference_map, client)
-
- @staticmethod
- def _dummy_ref_string():
- from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE
-
- project = u"bazzzz"
- collection_id = u"fizz"
- document_id = u"buzz"
- return u"projects/{}/databases/{}/documents/{}/{}".format(
- project, DEFAULT_DATABASE, collection_id, document_id
- )
-
- def test_found(self):
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.firestore_v1beta1.document import DocumentSnapshot
-
- now = datetime.datetime.utcnow()
- read_time = _datetime_to_pb_timestamp(now)
- delta = datetime.timedelta(seconds=100)
- update_time = _datetime_to_pb_timestamp(now - delta)
- create_time = _datetime_to_pb_timestamp(now - 2 * delta)
-
- ref_string = self._dummy_ref_string()
- document_pb = document_pb2.Document(
- name=ref_string,
- fields={
- "foo": document_pb2.Value(double_value=1.5),
- "bar": document_pb2.Value(string_value=u"skillz"),
- },
- create_time=create_time,
- update_time=update_time,
- )
- response_pb = _make_batch_response(found=document_pb, read_time=read_time)
-
- reference_map = {ref_string: mock.sentinel.reference}
- snapshot = self._call_fut(response_pb, reference_map)
- self.assertIsInstance(snapshot, DocumentSnapshot)
- self.assertIs(snapshot._reference, mock.sentinel.reference)
- self.assertEqual(snapshot._data, {"foo": 1.5, "bar": u"skillz"})
- self.assertTrue(snapshot._exists)
- self.assertEqual(snapshot.read_time, read_time)
- self.assertEqual(snapshot.create_time, create_time)
- self.assertEqual(snapshot.update_time, update_time)
-
- def test_missing(self):
- ref_string = self._dummy_ref_string()
- response_pb = _make_batch_response(missing=ref_string)
-
- snapshot = self._call_fut(response_pb, {})
- self.assertFalse(snapshot.exists)
-
- def test_unset_result_type(self):
- response_pb = _make_batch_response()
- with self.assertRaises(ValueError):
- self._call_fut(response_pb, {})
-
- def test_unknown_result_type(self):
- response_pb = mock.Mock(spec=["WhichOneof"])
- response_pb.WhichOneof.return_value = "zoob_value"
-
- with self.assertRaises(ValueError):
- self._call_fut(response_pb, {})
-
- response_pb.WhichOneof.assert_called_once_with("result")
-
-
-class Test__get_doc_mask(unittest.TestCase):
- @staticmethod
- def _call_fut(field_paths):
- from google.cloud.firestore_v1beta1.client import _get_doc_mask
-
- return _get_doc_mask(field_paths)
-
- def test_none(self):
- self.assertIsNone(self._call_fut(None))
-
- def test_paths(self):
- from google.cloud.firestore_v1beta1.proto import common_pb2
-
- field_paths = ["a.b", "c"]
- result = self._call_fut(field_paths)
- expected = common_pb2.DocumentMask(field_paths=field_paths)
- self.assertEqual(result, expected)
-
-
-def _make_credentials():
- import google.auth.credentials
-
- return mock.Mock(spec=google.auth.credentials.Credentials)
-
-
-def _make_batch_response(**kwargs):
- from google.cloud.firestore_v1beta1.proto import firestore_pb2
-
- return firestore_pb2.BatchGetDocumentsResponse(**kwargs)
-
-
-def _doc_get_info(ref_string, values):
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.firestore_v1beta1 import _helpers
-
- now = datetime.datetime.utcnow()
- read_time = _datetime_to_pb_timestamp(now)
- delta = datetime.timedelta(seconds=100)
- update_time = _datetime_to_pb_timestamp(now - delta)
- create_time = _datetime_to_pb_timestamp(now - 2 * delta)
-
- document_pb = document_pb2.Document(
- name=ref_string,
- fields=_helpers.encode_dict(values),
- create_time=create_time,
- update_time=update_time,
- )
-
- return document_pb, read_time
diff --git a/tests/unit/v1beta1/test_collection.py b/tests/unit/v1beta1/test_collection.py
deleted file mode 100644
index 2bc7695ae9..0000000000
--- a/tests/unit/v1beta1/test_collection.py
+++ /dev/null
@@ -1,593 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import types
-import unittest
-
-import mock
-import pytest
-import six
-
-
-class TestCollectionReference(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1.collection import CollectionReference
-
- return CollectionReference
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- @staticmethod
- def _get_public_methods(klass):
- return set(
- name
- for name, value in six.iteritems(klass.__dict__)
- if (not name.startswith("_") and isinstance(value, types.FunctionType))
- )
-
- def test_query_method_matching(self):
- from google.cloud.firestore_v1beta1.query import Query
-
- query_methods = self._get_public_methods(Query)
- klass = self._get_target_class()
- collection_methods = self._get_public_methods(klass)
- # Make sure every query method is present on
- # ``CollectionReference``.
- self.assertLessEqual(query_methods, collection_methods)
-
- def test_constructor(self):
- collection_id1 = "rooms"
- document_id = "roomA"
- collection_id2 = "messages"
- client = mock.sentinel.client
-
- collection = self._make_one(
- collection_id1, document_id, collection_id2, client=client
- )
- self.assertIs(collection._client, client)
- expected_path = (collection_id1, document_id, collection_id2)
- self.assertEqual(collection._path, expected_path)
-
- def test_constructor_invalid_path(self):
- with self.assertRaises(ValueError):
- self._make_one()
- with self.assertRaises(ValueError):
- self._make_one(99, "doc", "bad-collection-id")
- with self.assertRaises(ValueError):
- self._make_one("bad-document-ID", None, "sub-collection")
- with self.assertRaises(ValueError):
- self._make_one("Just", "A-Document")
-
- def test_constructor_invalid_kwarg(self):
- with self.assertRaises(TypeError):
- self._make_one("Coh-lek-shun", donut=True)
-
- def test___eq___other_type(self):
- client = mock.sentinel.client
- collection = self._make_one("name", client=client)
- other = object()
- self.assertFalse(collection == other)
-
- def test___eq___different_path_same_client(self):
- client = mock.sentinel.client
- collection = self._make_one("name", client=client)
- other = self._make_one("other", client=client)
- self.assertFalse(collection == other)
-
- def test___eq___same_path_different_client(self):
- client = mock.sentinel.client
- other_client = mock.sentinel.other_client
- collection = self._make_one("name", client=client)
- other = self._make_one("name", client=other_client)
- self.assertFalse(collection == other)
-
- def test___eq___same_path_same_client(self):
- client = mock.sentinel.client
- collection = self._make_one("name", client=client)
- other = self._make_one("name", client=client)
- self.assertTrue(collection == other)
-
- def test_id_property(self):
- collection_id = "hi-bob"
- collection = self._make_one(collection_id)
- self.assertEqual(collection.id, collection_id)
-
- def test_parent_property(self):
- from google.cloud.firestore_v1beta1.document import DocumentReference
-
- collection_id1 = "grocery-store"
- document_id = "market"
- collection_id2 = "darth"
- client = _make_client()
- collection = self._make_one(
- collection_id1, document_id, collection_id2, client=client
- )
-
- parent = collection.parent
- self.assertIsInstance(parent, DocumentReference)
- self.assertIs(parent._client, client)
- self.assertEqual(parent._path, (collection_id1, document_id))
-
- def test_parent_property_top_level(self):
- collection = self._make_one("tahp-leh-vull")
- self.assertIsNone(collection.parent)
-
- def test_document_factory_explicit_id(self):
- from google.cloud.firestore_v1beta1.document import DocumentReference
-
- collection_id = "grocery-store"
- document_id = "market"
- client = _make_client()
- collection = self._make_one(collection_id, client=client)
-
- child = collection.document(document_id)
- self.assertIsInstance(child, DocumentReference)
- self.assertIs(child._client, client)
- self.assertEqual(child._path, (collection_id, document_id))
-
- @mock.patch(
- "google.cloud.firestore_v1beta1.collection._auto_id",
- return_value="zorpzorpthreezorp012",
- )
- def test_document_factory_auto_id(self, mock_auto_id):
- from google.cloud.firestore_v1beta1.document import DocumentReference
-
- collection_name = "space-town"
- client = _make_client()
- collection = self._make_one(collection_name, client=client)
-
- child = collection.document()
- self.assertIsInstance(child, DocumentReference)
- self.assertIs(child._client, client)
- self.assertEqual(child._path, (collection_name, mock_auto_id.return_value))
-
- mock_auto_id.assert_called_once_with()
-
- def test__parent_info_top_level(self):
- client = _make_client()
- collection_id = "soap"
- collection = self._make_one(collection_id, client=client)
-
- parent_path, expected_prefix = collection._parent_info()
-
- expected_path = "projects/{}/databases/{}/documents".format(
- client.project, client._database
- )
- self.assertEqual(parent_path, expected_path)
- prefix = "{}/{}".format(expected_path, collection_id)
- self.assertEqual(expected_prefix, prefix)
-
- def test__parent_info_nested(self):
- collection_id1 = "bar"
- document_id = "baz"
- collection_id2 = "chunk"
- client = _make_client()
- collection = self._make_one(
- collection_id1, document_id, collection_id2, client=client
- )
-
- parent_path, expected_prefix = collection._parent_info()
-
- expected_path = "projects/{}/databases/{}/documents/{}/{}".format(
- client.project, client._database, collection_id1, document_id
- )
- self.assertEqual(parent_path, expected_path)
- prefix = "{}/{}".format(expected_path, collection_id2)
- self.assertEqual(expected_prefix, prefix)
-
- def test_add_auto_assigned(self):
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.document import DocumentReference
- from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP
- from google.cloud.firestore_v1beta1._helpers import pbs_for_set_no_merge
-
- # Create a minimal fake GAPIC add attach it to a real client.
- firestore_api = mock.Mock(spec=["create_document", "commit"])
- write_result = mock.Mock(
- update_time=mock.sentinel.update_time, spec=["update_time"]
- )
- commit_response = mock.Mock(
- write_results=[write_result],
- spec=["write_results", "commit_time"],
- commit_time=mock.sentinel.commit_time,
- )
- firestore_api.commit.return_value = commit_response
- create_doc_response = document_pb2.Document()
- firestore_api.create_document.return_value = create_doc_response
- client = _make_client()
- client._firestore_api_internal = firestore_api
-
- # Actually make a collection.
- collection = self._make_one("grand-parent", "parent", "child", client=client)
-
- # Add a dummy response for the fake GAPIC.
- parent_path = collection.parent._document_path
- auto_assigned_id = "cheezburger"
- name = "{}/{}/{}".format(parent_path, collection.id, auto_assigned_id)
- create_doc_response = document_pb2.Document(name=name)
- create_doc_response.update_time.FromDatetime(datetime.datetime.utcnow())
- firestore_api.create_document.return_value = create_doc_response
-
- # Actually call add() on our collection; include a transform to make
- # sure transforms during adds work.
- document_data = {"been": "here", "now": SERVER_TIMESTAMP}
- update_time, document_ref = collection.add(document_data)
-
- # Verify the response and the mocks.
- self.assertIs(update_time, mock.sentinel.update_time)
- self.assertIsInstance(document_ref, DocumentReference)
- self.assertIs(document_ref._client, client)
- expected_path = collection._path + (auto_assigned_id,)
- self.assertEqual(document_ref._path, expected_path)
-
- expected_document_pb = document_pb2.Document()
- firestore_api.create_document.assert_called_once_with(
- parent_path,
- collection_id=collection.id,
- document_id=None,
- document=expected_document_pb,
- mask=None,
- metadata=client._rpc_metadata,
- )
- write_pbs = pbs_for_set_no_merge(document_ref._document_path, document_data)
- firestore_api.commit.assert_called_once_with(
- client._database_string,
- write_pbs,
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- @staticmethod
- def _write_pb_for_create(document_path, document_data):
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1 import _helpers
-
- return write_pb2.Write(
- update=document_pb2.Document(
- name=document_path, fields=_helpers.encode_dict(document_data)
- ),
- current_document=common_pb2.Precondition(exists=False),
- )
-
- def test_add_explicit_id(self):
- from google.cloud.firestore_v1beta1.document import DocumentReference
-
- # Create a minimal fake GAPIC with a dummy response.
- firestore_api = mock.Mock(spec=["commit"])
- write_result = mock.Mock(
- update_time=mock.sentinel.update_time, spec=["update_time"]
- )
- commit_response = mock.Mock(
- write_results=[write_result],
- spec=["write_results", "commit_time"],
- commit_time=mock.sentinel.commit_time,
- )
- firestore_api.commit.return_value = commit_response
-
- # Attach the fake GAPIC to a real client.
- client = _make_client()
- client._firestore_api_internal = firestore_api
-
- # Actually make a collection and call add().
- collection = self._make_one("parent", client=client)
- document_data = {"zorp": 208.75, "i-did-not": b"know that"}
- doc_id = "child"
- update_time, document_ref = collection.add(document_data, document_id=doc_id)
-
- # Verify the response and the mocks.
- self.assertIs(update_time, mock.sentinel.update_time)
- self.assertIsInstance(document_ref, DocumentReference)
- self.assertIs(document_ref._client, client)
- self.assertEqual(document_ref._path, (collection.id, doc_id))
-
- write_pb = self._write_pb_for_create(document_ref._document_path, document_data)
- firestore_api.commit.assert_called_once_with(
- client._database_string,
- [write_pb],
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_select(self):
- from google.cloud.firestore_v1beta1.query import Query
-
- collection = self._make_one("collection")
- field_paths = ["a", "b"]
- query = collection.select(field_paths)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- projection_paths = [
- field_ref.field_path for field_ref in query._projection.fields
- ]
- self.assertEqual(projection_paths, field_paths)
-
- @staticmethod
- def _make_field_filter_pb(field_path, op_string, value):
- from google.cloud.firestore_v1beta1.proto import query_pb2
- from google.cloud.firestore_v1beta1 import _helpers
- from google.cloud.firestore_v1beta1.query import _enum_from_op_string
-
- return query_pb2.StructuredQuery.FieldFilter(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- op=_enum_from_op_string(op_string),
- value=_helpers.encode_value(value),
- )
-
- def test_where(self):
- from google.cloud.firestore_v1beta1.query import Query
-
- collection = self._make_one("collection")
- field_path = "foo"
- op_string = "=="
- value = 45
- query = collection.where(field_path, op_string, value)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(len(query._field_filters), 1)
- field_filter_pb = query._field_filters[0]
- self.assertEqual(
- field_filter_pb, self._make_field_filter_pb(field_path, op_string, value)
- )
-
- @staticmethod
- def _make_order_pb(field_path, direction):
- from google.cloud.firestore_v1beta1.proto import query_pb2
- from google.cloud.firestore_v1beta1.query import _enum_from_direction
-
- return query_pb2.StructuredQuery.Order(
- field=query_pb2.StructuredQuery.FieldReference(field_path=field_path),
- direction=_enum_from_direction(direction),
- )
-
- def test_order_by(self):
- from google.cloud.firestore_v1beta1.query import Query
-
- collection = self._make_one("collection")
- field_path = "foo"
- direction = Query.DESCENDING
- query = collection.order_by(field_path, direction=direction)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(len(query._orders), 1)
- order_pb = query._orders[0]
- self.assertEqual(order_pb, self._make_order_pb(field_path, direction))
-
- def test_limit(self):
- from google.cloud.firestore_v1beta1.query import Query
-
- collection = self._make_one("collection")
- limit = 15
- query = collection.limit(limit)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._limit, limit)
-
- def test_offset(self):
- from google.cloud.firestore_v1beta1.query import Query
-
- collection = self._make_one("collection")
- offset = 113
- query = collection.offset(offset)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._offset, offset)
-
- def test_start_at(self):
- from google.cloud.firestore_v1beta1.query import Query
-
- collection = self._make_one("collection")
- doc_fields = {"a": "b"}
- query = collection.start_at(doc_fields)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._start_at, (doc_fields, True))
-
- def test_start_after(self):
- from google.cloud.firestore_v1beta1.query import Query
-
- collection = self._make_one("collection")
- doc_fields = {"d": "foo", "e": 10}
- query = collection.start_after(doc_fields)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._start_at, (doc_fields, False))
-
- def test_end_before(self):
- from google.cloud.firestore_v1beta1.query import Query
-
- collection = self._make_one("collection")
- doc_fields = {"bar": 10.5}
- query = collection.end_before(doc_fields)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._end_at, (doc_fields, True))
-
- def test_end_at(self):
- from google.cloud.firestore_v1beta1.query import Query
-
- collection = self._make_one("collection")
- doc_fields = {"opportunity": True, "reason": 9}
- query = collection.end_at(doc_fields)
-
- self.assertIsInstance(query, Query)
- self.assertIs(query._parent, collection)
- self.assertEqual(query._end_at, (doc_fields, False))
-
- def _list_documents_helper(self, page_size=None):
- from google.api_core.page_iterator import Iterator
- from google.api_core.page_iterator import Page
- from google.cloud.firestore_v1beta1.document import DocumentReference
- from google.cloud.firestore_v1beta1.gapic.firestore_client import (
- FirestoreClient,
- )
- from google.cloud.firestore_v1beta1.proto.document_pb2 import Document
-
- class _Iterator(Iterator):
- def __init__(self, pages):
- super(_Iterator, self).__init__(client=None)
- self._pages = pages
-
- def _next_page(self):
- if self._pages:
- page, self._pages = self._pages[0], self._pages[1:]
- return Page(self, page, self.item_to_value)
-
- client = _make_client()
- template = client._database_string + "/documents/{}"
- document_ids = ["doc-1", "doc-2"]
- documents = [
- Document(name=template.format(document_id)) for document_id in document_ids
- ]
- iterator = _Iterator(pages=[documents])
- api_client = mock.create_autospec(FirestoreClient)
- api_client.list_documents.return_value = iterator
- client._firestore_api_internal = api_client
- collection = self._make_one("collection", client=client)
-
- if page_size is not None:
- documents = list(collection.list_documents(page_size=page_size))
- else:
- documents = list(collection.list_documents())
-
- # Verify the response and the mocks.
- self.assertEqual(len(documents), len(document_ids))
- for document, document_id in zip(documents, document_ids):
- self.assertIsInstance(document, DocumentReference)
- self.assertEqual(document.parent, collection)
- self.assertEqual(document.id, document_id)
-
- parent, _ = collection._parent_info()
- api_client.list_documents.assert_called_once_with(
- parent,
- collection.id,
- page_size=page_size,
- show_missing=True,
- metadata=client._rpc_metadata,
- )
-
- def test_list_documents_wo_page_size(self):
- self._list_documents_helper()
-
- def test_list_documents_w_page_size(self):
- self._list_documents_helper(page_size=25)
-
- @mock.patch("google.cloud.firestore_v1beta1.query.Query", autospec=True)
- def test_get(self, query_class):
- import warnings
-
- collection = self._make_one("collection")
- with warnings.catch_warnings(record=True) as warned:
- get_response = collection.get()
-
- query_class.assert_called_once_with(collection)
- query_instance = query_class.return_value
- self.assertIs(get_response, query_instance.stream.return_value)
- query_instance.stream.assert_called_once_with(transaction=None)
-
- # Verify the deprecation
- self.assertEqual(len(warned), 1)
- self.assertIs(warned[0].category, DeprecationWarning)
-
- @mock.patch("google.cloud.firestore_v1beta1.query.Query", autospec=True)
- def test_get_with_transaction(self, query_class):
- import warnings
-
- collection = self._make_one("collection")
- transaction = mock.sentinel.txn
- with warnings.catch_warnings(record=True) as warned:
- get_response = collection.get(transaction=transaction)
-
- query_class.assert_called_once_with(collection)
- query_instance = query_class.return_value
- self.assertIs(get_response, query_instance.stream.return_value)
- query_instance.stream.assert_called_once_with(transaction=transaction)
-
- # Verify the deprecation
- self.assertEqual(len(warned), 1)
- self.assertIs(warned[0].category, DeprecationWarning)
-
- @mock.patch("google.cloud.firestore_v1beta1.query.Query", autospec=True)
- def test_stream(self, query_class):
- collection = self._make_one("collection")
- stream_response = collection.stream()
-
- query_class.assert_called_once_with(collection)
- query_instance = query_class.return_value
- self.assertIs(stream_response, query_instance.stream.return_value)
- query_instance.stream.assert_called_once_with(transaction=None)
-
- @mock.patch("google.cloud.firestore_v1beta1.query.Query", autospec=True)
- def test_stream_with_transaction(self, query_class):
- collection = self._make_one("collection")
- transaction = mock.sentinel.txn
- stream_response = collection.stream(transaction=transaction)
-
- query_class.assert_called_once_with(collection)
- query_instance = query_class.return_value
- self.assertIs(stream_response, query_instance.stream.return_value)
- query_instance.stream.assert_called_once_with(transaction=transaction)
-
- @mock.patch("google.cloud.firestore_v1beta1.collection.Watch", autospec=True)
- def test_on_snapshot(self, watch):
- collection = self._make_one("collection")
- collection.on_snapshot(None)
- watch.for_query.assert_called_once()
-
-
-class Test__auto_id(unittest.TestCase):
- @staticmethod
- def _call_fut():
- from google.cloud.firestore_v1beta1.collection import _auto_id
-
- return _auto_id()
-
- @mock.patch("random.choice")
- def test_it(self, mock_rand_choice):
- from google.cloud.firestore_v1beta1.collection import _AUTO_ID_CHARS
-
- mock_result = "0123456789abcdefghij"
- mock_rand_choice.side_effect = list(mock_result)
- result = self._call_fut()
- self.assertEqual(result, mock_result)
-
- mock_calls = [mock.call(_AUTO_ID_CHARS)] * 20
- self.assertEqual(mock_rand_choice.mock_calls, mock_calls)
-
-
-def _make_credentials():
- import google.auth.credentials
-
- return mock.Mock(spec=google.auth.credentials.Credentials)
-
-
-def _make_client():
- from google.cloud.firestore_v1beta1.client import Client
-
- credentials = _make_credentials()
- with pytest.deprecated_call():
- return Client(project="project-project", credentials=credentials)
diff --git a/tests/unit/v1beta1/test_cross_language.py b/tests/unit/v1beta1/test_cross_language.py
deleted file mode 100644
index d04b71436f..0000000000
--- a/tests/unit/v1beta1/test_cross_language.py
+++ /dev/null
@@ -1,505 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import functools
-import glob
-import json
-import os
-
-import mock
-import pytest
-
-from google.protobuf import text_format
-from google.cloud.firestore_v1beta1.proto import document_pb2
-from google.cloud.firestore_v1beta1.proto import firestore_pb2
-from google.cloud.firestore_v1beta1.proto import test_v1beta1_pb2
-from google.cloud.firestore_v1beta1.proto import write_pb2
-
-
-def _load_testproto(filename):
- with open(filename, "r") as tp_file:
- tp_text = tp_file.read()
- test_proto = test_v1beta1_pb2.Test()
- text_format.Merge(tp_text, test_proto)
- shortname = os.path.split(filename)[-1]
- test_proto.description = test_proto.description + " (%s)" % shortname
- return test_proto
-
-
-_here = os.path.dirname(__file__)
-_glob_expr = "{}/testdata/*.textproto".format(_here)
-_globs = glob.glob(_glob_expr)
-ALL_TESTPROTOS = [_load_testproto(filename) for filename in sorted(_globs)]
-
-_CREATE_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "create"
-]
-
-_GET_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "get"
-]
-
-_SET_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "set"
-]
-
-_UPDATE_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "update"
-]
-
-_UPDATE_PATHS_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "update_paths"
-]
-
-_DELETE_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "delete"
-]
-
-_LISTEN_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "listen"
-]
-
-_QUERY_TESTPROTOS = [
- test_proto
- for test_proto in ALL_TESTPROTOS
- if test_proto.WhichOneof("test") == "query"
-]
-
-
-def _mock_firestore_api():
- firestore_api = mock.Mock(spec=["commit"])
- commit_response = firestore_pb2.CommitResponse(
- write_results=[write_pb2.WriteResult()]
- )
- firestore_api.commit.return_value = commit_response
- return firestore_api
-
-
-def _make_client_document(firestore_api, testcase):
- from google.cloud.firestore_v1beta1 import Client
- from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE
- import google.auth.credentials
-
- _, project, _, database, _, doc_path = testcase.doc_ref_path.split("/", 5)
- assert database == DEFAULT_DATABASE
-
- # Attach the fake GAPIC to a real client.
- credentials = mock.Mock(spec=google.auth.credentials.Credentials)
-
- with pytest.deprecated_call():
- client = Client(project=project, credentials=credentials)
-
- client._firestore_api_internal = firestore_api
- return client, client.document(doc_path)
-
-
-def _run_testcase(testcase, call, firestore_api, client):
- if getattr(testcase, "is_error", False):
- # TODO: is there a subclass of Exception we can check for?
- with pytest.raises(Exception):
- call()
- else:
- call()
- firestore_api.commit.assert_called_once_with(
- client._database_string,
- list(testcase.request.writes),
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
-
-@pytest.mark.parametrize("test_proto", _CREATE_TESTPROTOS)
-def test_create_testprotos(test_proto):
- testcase = test_proto.create
- firestore_api = _mock_firestore_api()
- client, document = _make_client_document(firestore_api, testcase)
- data = convert_data(json.loads(testcase.json_data))
- call = functools.partial(document.create, data)
- _run_testcase(testcase, call, firestore_api, client)
-
-
-@pytest.mark.parametrize("test_proto", _GET_TESTPROTOS)
-def test_get_testprotos(test_proto):
- testcase = test_proto.get
- firestore_api = mock.Mock(spec=["get_document"])
- response = document_pb2.Document()
- firestore_api.get_document.return_value = response
- client, document = _make_client_document(firestore_api, testcase)
-
- document.get() # No '.textprotos' for errors, field_paths.
-
- firestore_api.get_document.assert_called_once_with(
- document._document_path,
- mask=None,
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
-
-@pytest.mark.parametrize("test_proto", _SET_TESTPROTOS)
-def test_set_testprotos(test_proto):
- testcase = test_proto.set
- firestore_api = _mock_firestore_api()
- client, document = _make_client_document(firestore_api, testcase)
- data = convert_data(json.loads(testcase.json_data))
- if testcase.HasField("option"):
- merge = convert_set_option(testcase.option)
- else:
- merge = False
- call = functools.partial(document.set, data, merge=merge)
- _run_testcase(testcase, call, firestore_api, client)
-
-
-@pytest.mark.parametrize("test_proto", _UPDATE_TESTPROTOS)
-def test_update_testprotos(test_proto):
- testcase = test_proto.update
- firestore_api = _mock_firestore_api()
- client, document = _make_client_document(firestore_api, testcase)
- data = convert_data(json.loads(testcase.json_data))
- if testcase.HasField("precondition"):
- option = convert_precondition(testcase.precondition)
- else:
- option = None
- call = functools.partial(document.update, data, option)
- _run_testcase(testcase, call, firestore_api, client)
-
-
-@pytest.mark.skip(reason="Python has no way to call update with a list of field paths.")
-@pytest.mark.parametrize("test_proto", _UPDATE_PATHS_TESTPROTOS)
-def test_update_paths_testprotos(test_proto): # pragma: NO COVER
- pass
-
-
-@pytest.mark.parametrize("test_proto", _DELETE_TESTPROTOS)
-def test_delete_testprotos(test_proto):
- testcase = test_proto.delete
- firestore_api = _mock_firestore_api()
- client, document = _make_client_document(firestore_api, testcase)
- if testcase.HasField("precondition"):
- option = convert_precondition(testcase.precondition)
- else:
- option = None
- call = functools.partial(document.delete, option)
- _run_testcase(testcase, call, firestore_api, client)
-
-
-@pytest.mark.parametrize("test_proto", _LISTEN_TESTPROTOS)
-def test_listen_testprotos(test_proto): # pragma: NO COVER
- # test_proto.listen has 'reponses' messages,
- # 'google.firestore.v1beta1.ListenResponse'
- # and then an expected list of 'snapshots' (local 'Snapshot'), containing
- # 'docs' (list of 'google.firestore.v1beta1.Document'),
- # 'changes' (list lof local 'DocChange', and 'read_time' timestamp.
- from google.cloud.firestore_v1beta1 import Client
- from google.cloud.firestore_v1beta1 import DocumentReference
- from google.cloud.firestore_v1beta1 import DocumentSnapshot
- from google.cloud.firestore_v1beta1 import Watch
- import google.auth.credentials
-
- testcase = test_proto.listen
- testname = test_proto.description
-
- credentials = mock.Mock(spec=google.auth.credentials.Credentials)
-
- with pytest.deprecated_call():
- client = Client(project="project", credentials=credentials)
-
- modulename = "google.cloud.firestore_v1beta1.watch"
- with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc):
- with mock.patch(
- "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer
- ):
- with mock.patch( # conformance data sets WATCH_TARGET_ID to 1
- "%s.WATCH_TARGET_ID" % modulename, 1
- ):
- snapshots = []
-
- def callback(keys, applied_changes, read_time):
- snapshots.append((keys, applied_changes, read_time))
-
- query = DummyQuery(client=client)
- watch = Watch.for_query(
- query, callback, DocumentSnapshot, DocumentReference
- )
- # conformance data has db string as this
- db_str = "projects/projectID/databases/(default)"
- watch._firestore._database_string_internal = db_str
-
- if testcase.is_error:
- try:
- for proto in testcase.responses:
- watch.on_snapshot(proto)
- except RuntimeError:
- # listen-target-add-wrong-id.textpro
- # listen-target-remove.textpro
- pass
-
- else:
- for proto in testcase.responses:
- watch.on_snapshot(proto)
-
- assert len(snapshots) == len(testcase.snapshots)
- for i, (expected_snapshot, actual_snapshot) in enumerate(
- zip(testcase.snapshots, snapshots)
- ):
- expected_changes = expected_snapshot.changes
- actual_changes = actual_snapshot[1]
- if len(expected_changes) != len(actual_changes):
- raise AssertionError(
- "change length mismatch in %s (snapshot #%s)"
- % (testname, i)
- )
- for y, (expected_change, actual_change) in enumerate(
- zip(expected_changes, actual_changes)
- ):
- expected_change_kind = expected_change.kind
- actual_change_kind = actual_change.type.value
- if expected_change_kind != actual_change_kind:
- raise AssertionError(
- "change type mismatch in %s (snapshot #%s, change #%s')"
- % (testname, i, y)
- )
-
-
-@pytest.mark.parametrize("test_proto", _QUERY_TESTPROTOS)
-def test_query_testprotos(test_proto): # pragma: NO COVER
- testcase = test_proto.query
- if testcase.is_error:
- with pytest.raises(Exception):
- query = parse_query(testcase)
- query._to_protobuf()
- else:
- query = parse_query(testcase)
- found = query._to_protobuf()
- assert found == testcase.query
-
-
-def convert_data(v):
- # Replace the strings 'ServerTimestamp' and 'Delete' with the corresponding
- # sentinels.
- from google.cloud.firestore_v1beta1 import ArrayRemove
- from google.cloud.firestore_v1beta1 import ArrayUnion
- from google.cloud.firestore_v1beta1 import DELETE_FIELD
- from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP
-
- if v == "ServerTimestamp":
- return SERVER_TIMESTAMP
- elif v == "Delete":
- return DELETE_FIELD
- elif isinstance(v, list):
- if v[0] == "ArrayRemove":
- return ArrayRemove([convert_data(e) for e in v[1:]])
- if v[0] == "ArrayUnion":
- return ArrayUnion([convert_data(e) for e in v[1:]])
- return [convert_data(e) for e in v]
- elif isinstance(v, dict):
- return {k: convert_data(v2) for k, v2 in v.items()}
- elif v == "NaN":
- return float(v)
- else:
- return v
-
-
-def convert_set_option(option):
- from google.cloud.firestore_v1beta1 import _helpers
-
- if option.fields:
- return [
- _helpers.FieldPath(*field.field).to_api_repr() for field in option.fields
- ]
-
- assert option.all
- return True
-
-
-def convert_precondition(precond):
- from google.cloud.firestore_v1beta1 import Client
-
- if precond.HasField("exists"):
- return Client.write_option(exists=precond.exists)
-
- assert precond.HasField("update_time")
- return Client.write_option(last_update_time=precond.update_time)
-
-
-class DummyRpc(object): # pragma: NO COVER
- def __init__(self, listen, initial_request, should_recover, metadata=None):
- self.listen = listen
- self.initial_request = initial_request
- self.should_recover = should_recover
- self.closed = False
- self.callbacks = []
- self._metadata = metadata
-
- def add_done_callback(self, callback):
- self.callbacks.append(callback)
-
- def close(self):
- self.closed = True
-
-
-class DummyBackgroundConsumer(object): # pragma: NO COVER
- started = False
- stopped = False
- is_active = True
-
- def __init__(self, rpc, on_snapshot):
- self._rpc = rpc
- self.on_snapshot = on_snapshot
-
- def start(self):
- self.started = True
-
- def stop(self):
- self.stopped = True
- self.is_active = False
-
-
-class DummyQuery(object): # pragma: NO COVER
- def __init__(self, **kw):
- self._client = kw["client"]
- self._comparator = lambda x, y: 1
-
- def _to_protobuf(self):
- from google.cloud.firestore_v1beta1.proto import query_pb2
-
- query_kwargs = {
- "select": None,
- "from": None,
- "where": None,
- "order_by": None,
- "start_at": None,
- "end_at": None,
- }
- return query_pb2.StructuredQuery(**query_kwargs)
-
-
-def parse_query(testcase):
- # 'query' testcase contains:
- # - 'coll_path': collection ref path.
- # - 'clauses': array of one or more 'Clause' elements
- # - 'query': the actual google.firestore.v1beta1.StructuredQuery message
- # to be constructed.
- # - 'is_error' (as other testcases).
- #
- # 'Clause' elements are unions of:
- # - 'select': [field paths]
- # - 'where': (field_path, op, json_value)
- # - 'order_by': (field_path, direction)
- # - 'offset': int
- # - 'limit': int
- # - 'start_at': 'Cursor'
- # - 'start_after': 'Cursor'
- # - 'end_at': 'Cursor'
- # - 'end_before': 'Cursor'
- #
- # 'Cursor' contains either:
- # - 'doc_snapshot': 'DocSnapshot'
- # - 'json_values': [string]
- #
- # 'DocSnapshot' contains:
- # 'path': str
- # 'json_data': str
- from google.auth.credentials import Credentials
- from google.cloud.firestore_v1beta1 import Client
- from google.cloud.firestore_v1beta1 import Query
-
- _directions = {"asc": Query.ASCENDING, "desc": Query.DESCENDING}
-
- credentials = mock.create_autospec(Credentials)
-
- with pytest.deprecated_call():
- client = Client("projectID", credentials)
-
- path = parse_path(testcase.coll_path)
- collection = client.collection(*path)
- query = collection
-
- for clause in testcase.clauses:
- kind = clause.WhichOneof("clause")
-
- if kind == "select":
- field_paths = [
- ".".join(field_path.field) for field_path in clause.select.fields
- ]
- query = query.select(field_paths)
- elif kind == "where":
- path = ".".join(clause.where.path.field)
- value = convert_data(json.loads(clause.where.json_value))
- query = query.where(path, clause.where.op, value)
- elif kind == "order_by":
- path = ".".join(clause.order_by.path.field)
- direction = clause.order_by.direction
- direction = _directions.get(direction, direction)
- query = query.order_by(path, direction=direction)
- elif kind == "offset":
- query = query.offset(clause.offset)
- elif kind == "limit":
- query = query.limit(clause.limit)
- elif kind == "start_at":
- cursor = parse_cursor(clause.start_at, client)
- query = query.start_at(cursor)
- elif kind == "start_after":
- cursor = parse_cursor(clause.start_after, client)
- query = query.start_after(cursor)
- elif kind == "end_at":
- cursor = parse_cursor(clause.end_at, client)
- query = query.end_at(cursor)
- elif kind == "end_before":
- cursor = parse_cursor(clause.end_before, client)
- query = query.end_before(cursor)
- else: # pragma: NO COVER
- raise ValueError("Unknown query clause: {}".format(kind))
-
- return query
-
-
-def parse_path(path):
- _, relative = path.split("documents/")
- return relative.split("/")
-
-
-def parse_cursor(cursor, client):
- from google.cloud.firestore_v1beta1 import DocumentReference
- from google.cloud.firestore_v1beta1 import DocumentSnapshot
-
- if cursor.HasField("doc_snapshot"):
- path = parse_path(cursor.doc_snapshot.path)
- doc_ref = DocumentReference(*path, client=client)
-
- return DocumentSnapshot(
- reference=doc_ref,
- data=json.loads(cursor.doc_snapshot.json_data),
- exists=True,
- read_time=None,
- create_time=None,
- update_time=None,
- )
-
- values = [json.loads(value) for value in cursor.json_values]
- return convert_data(values)
diff --git a/tests/unit/v1beta1/test_document.py b/tests/unit/v1beta1/test_document.py
deleted file mode 100644
index f9aca71344..0000000000
--- a/tests/unit/v1beta1/test_document.py
+++ /dev/null
@@ -1,830 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import unittest
-
-import mock
-import pytest
-
-
-class TestDocumentReference(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1.document import DocumentReference
-
- return DocumentReference
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- def test_constructor(self):
- collection_id1 = "users"
- document_id1 = "alovelace"
- collection_id2 = "platform"
- document_id2 = "*nix"
- client = mock.MagicMock()
- client.__hash__.return_value = 1234
-
- document = self._make_one(
- collection_id1, document_id1, collection_id2, document_id2, client=client
- )
- self.assertIs(document._client, client)
- expected_path = "/".join(
- (collection_id1, document_id1, collection_id2, document_id2)
- )
- self.assertEqual(document.path, expected_path)
-
- def test_constructor_invalid_path(self):
- with self.assertRaises(ValueError):
- self._make_one()
- with self.assertRaises(ValueError):
- self._make_one(None, "before", "bad-collection-id", "fifteen")
- with self.assertRaises(ValueError):
- self._make_one("bad-document-ID", None)
- with self.assertRaises(ValueError):
- self._make_one("Just", "A-Collection", "Sub")
-
- def test_constructor_invalid_kwarg(self):
- with self.assertRaises(TypeError):
- self._make_one("Coh-lek-shun", "Dahk-yu-mehnt", burger=18.75)
-
- def test___copy__(self):
- client = _make_client("rain")
- document = self._make_one("a", "b", client=client)
- # Access the document path so it is copied.
- doc_path = document._document_path
- self.assertEqual(doc_path, document._document_path_internal)
-
- new_document = document.__copy__()
- self.assertIsNot(new_document, document)
- self.assertIs(new_document._client, document._client)
- self.assertEqual(new_document._path, document._path)
- self.assertEqual(
- new_document._document_path_internal, document._document_path_internal
- )
-
- def test___deepcopy__calls_copy(self):
- client = mock.sentinel.client
- document = self._make_one("a", "b", client=client)
- document.__copy__ = mock.Mock(return_value=mock.sentinel.new_doc, spec=[])
-
- unused_memo = {}
- new_document = document.__deepcopy__(unused_memo)
- self.assertIs(new_document, mock.sentinel.new_doc)
- document.__copy__.assert_called_once_with()
-
- def test__eq__same_type(self):
- document1 = self._make_one("X", "YY", client=mock.sentinel.client)
- document2 = self._make_one("X", "ZZ", client=mock.sentinel.client)
- document3 = self._make_one("X", "YY", client=mock.sentinel.client2)
- document4 = self._make_one("X", "YY", client=mock.sentinel.client)
-
- pairs = ((document1, document2), (document1, document3), (document2, document3))
- for candidate1, candidate2 in pairs:
- # We use == explicitly since assertNotEqual would use !=.
- equality_val = candidate1 == candidate2
- self.assertFalse(equality_val)
-
- # Check the only equal one.
- self.assertEqual(document1, document4)
- self.assertIsNot(document1, document4)
-
- def test__eq__other_type(self):
- document = self._make_one("X", "YY", client=mock.sentinel.client)
- other = object()
- equality_val = document == other
- self.assertFalse(equality_val)
- self.assertIs(document.__eq__(other), NotImplemented)
-
- def test___hash__(self):
- client = mock.MagicMock()
- client.__hash__.return_value = 234566789
- document = self._make_one("X", "YY", client=client)
- self.assertEqual(hash(document), hash(("X", "YY")) + hash(client))
-
- def test__ne__same_type(self):
- document1 = self._make_one("X", "YY", client=mock.sentinel.client)
- document2 = self._make_one("X", "ZZ", client=mock.sentinel.client)
- document3 = self._make_one("X", "YY", client=mock.sentinel.client2)
- document4 = self._make_one("X", "YY", client=mock.sentinel.client)
-
- self.assertNotEqual(document1, document2)
- self.assertNotEqual(document1, document3)
- self.assertNotEqual(document2, document3)
-
- # We use != explicitly since assertEqual would use ==.
- inequality_val = document1 != document4
- self.assertFalse(inequality_val)
- self.assertIsNot(document1, document4)
-
- def test__ne__other_type(self):
- document = self._make_one("X", "YY", client=mock.sentinel.client)
- other = object()
- self.assertNotEqual(document, other)
- self.assertIs(document.__ne__(other), NotImplemented)
-
- def test__document_path_property(self):
- project = "hi-its-me-ok-bye"
- client = _make_client(project=project)
-
- collection_id = "then"
- document_id = "090909iii"
- document = self._make_one(collection_id, document_id, client=client)
- doc_path = document._document_path
- expected = "projects/{}/databases/{}/documents/{}/{}".format(
- project, client._database, collection_id, document_id
- )
- self.assertEqual(doc_path, expected)
- self.assertIs(document._document_path_internal, doc_path)
-
- # Make sure value is cached.
- document._document_path_internal = mock.sentinel.cached
- self.assertIs(document._document_path, mock.sentinel.cached)
-
- def test__document_path_property_no_client(self):
- document = self._make_one("hi", "bye")
- self.assertIsNone(document._client)
- with self.assertRaises(ValueError):
- getattr(document, "_document_path")
-
- self.assertIsNone(document._document_path_internal)
-
- def test_id_property(self):
- document_id = "867-5309"
- document = self._make_one("Co-lek-shun", document_id)
- self.assertEqual(document.id, document_id)
-
- def test_parent_property(self):
- from google.cloud.firestore_v1beta1.collection import CollectionReference
-
- collection_id = "grocery-store"
- document_id = "market"
- client = _make_client()
- document = self._make_one(collection_id, document_id, client=client)
-
- parent = document.parent
- self.assertIsInstance(parent, CollectionReference)
- self.assertIs(parent._client, client)
- self.assertEqual(parent._path, (collection_id,))
-
- def test_collection_factory(self):
- from google.cloud.firestore_v1beta1.collection import CollectionReference
-
- collection_id = "grocery-store"
- document_id = "market"
- new_collection = "fruits"
- client = _make_client()
- document = self._make_one(collection_id, document_id, client=client)
-
- child = document.collection(new_collection)
- self.assertIsInstance(child, CollectionReference)
- self.assertIs(child._client, client)
- self.assertEqual(child._path, (collection_id, document_id, new_collection))
-
- @staticmethod
- def _write_pb_for_create(document_path, document_data):
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1 import _helpers
-
- return write_pb2.Write(
- update=document_pb2.Document(
- name=document_path, fields=_helpers.encode_dict(document_data)
- ),
- current_document=common_pb2.Precondition(exists=False),
- )
-
- @staticmethod
- def _make_commit_repsonse(write_results=None):
- from google.cloud.firestore_v1beta1.proto import firestore_pb2
-
- response = mock.create_autospec(firestore_pb2.CommitResponse)
- response.write_results = write_results or [mock.sentinel.write_result]
- response.commit_time = mock.sentinel.commit_time
- return response
-
- def test_create(self):
- # Create a minimal fake GAPIC with a dummy response.
- firestore_api = mock.Mock(spec=["commit"])
- firestore_api.commit.return_value = self._make_commit_repsonse()
-
- # Attach the fake GAPIC to a real client.
- client = _make_client("dignity")
- client._firestore_api_internal = firestore_api
-
- # Actually make a document and call create().
- document = self._make_one("foo", "twelve", client=client)
- document_data = {"hello": "goodbye", "count": 99}
- write_result = document.create(document_data)
-
- # Verify the response and the mocks.
- self.assertIs(write_result, mock.sentinel.write_result)
- write_pb = self._write_pb_for_create(document._document_path, document_data)
- firestore_api.commit.assert_called_once_with(
- client._database_string,
- [write_pb],
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_create_empty(self):
- # Create a minimal fake GAPIC with a dummy response.
- from google.cloud.firestore_v1beta1.document import DocumentReference
- from google.cloud.firestore_v1beta1.document import DocumentSnapshot
-
- firestore_api = mock.Mock(spec=["commit"])
- document_reference = mock.create_autospec(DocumentReference)
- snapshot = mock.create_autospec(DocumentSnapshot)
- snapshot.exists = True
- document_reference.get.return_value = snapshot
- firestore_api.commit.return_value = self._make_commit_repsonse(
- write_results=[document_reference]
- )
-
- # Attach the fake GAPIC to a real client.
- client = _make_client("dignity")
- client._firestore_api_internal = firestore_api
- client.get_all = mock.MagicMock()
- client.get_all.exists.return_value = True
-
- # Actually make a document and call create().
- document = self._make_one("foo", "twelve", client=client)
- document_data = {}
- write_result = document.create(document_data)
- self.assertTrue(write_result.get().exists)
-
- @staticmethod
- def _write_pb_for_set(document_path, document_data, merge):
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1 import _helpers
-
- write_pbs = write_pb2.Write(
- update=document_pb2.Document(
- name=document_path, fields=_helpers.encode_dict(document_data)
- )
- )
- if merge:
- field_paths = [
- field_path
- for field_path, value in _helpers.extract_fields(
- document_data, _helpers.FieldPath()
- )
- ]
- field_paths = [
- field_path.to_api_repr() for field_path in sorted(field_paths)
- ]
- mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
- write_pbs.update_mask.CopyFrom(mask)
- return write_pbs
-
- def _set_helper(self, merge=False, **option_kwargs):
- # Create a minimal fake GAPIC with a dummy response.
- firestore_api = mock.Mock(spec=["commit"])
- firestore_api.commit.return_value = self._make_commit_repsonse()
-
- # Attach the fake GAPIC to a real client.
- client = _make_client("db-dee-bee")
- client._firestore_api_internal = firestore_api
-
- # Actually make a document and call create().
- document = self._make_one("User", "Interface", client=client)
- document_data = {"And": 500, "Now": b"\xba\xaa\xaa \xba\xaa\xaa"}
- write_result = document.set(document_data, merge)
-
- # Verify the response and the mocks.
- self.assertIs(write_result, mock.sentinel.write_result)
- write_pb = self._write_pb_for_set(document._document_path, document_data, merge)
-
- firestore_api.commit.assert_called_once_with(
- client._database_string,
- [write_pb],
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_set(self):
- self._set_helper()
-
- def test_set_merge(self):
- self._set_helper(merge=True)
-
- @staticmethod
- def _write_pb_for_update(document_path, update_values, field_paths):
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
- from google.cloud.firestore_v1beta1 import _helpers
-
- return write_pb2.Write(
- update=document_pb2.Document(
- name=document_path, fields=_helpers.encode_dict(update_values)
- ),
- update_mask=common_pb2.DocumentMask(field_paths=field_paths),
- current_document=common_pb2.Precondition(exists=True),
- )
-
- def _update_helper(self, **option_kwargs):
- from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD
-
- # Create a minimal fake GAPIC with a dummy response.
- firestore_api = mock.Mock(spec=["commit"])
- firestore_api.commit.return_value = self._make_commit_repsonse()
-
- # Attach the fake GAPIC to a real client.
- client = _make_client("potato-chip")
- client._firestore_api_internal = firestore_api
-
- # Actually make a document and call create().
- document = self._make_one("baked", "Alaska", client=client)
- # "Cheat" and use OrderedDict-s so that iteritems() is deterministic.
- field_updates = collections.OrderedDict(
- (("hello", 1), ("then.do", False), ("goodbye", DELETE_FIELD))
- )
- if option_kwargs:
- option = client.write_option(**option_kwargs)
- write_result = document.update(field_updates, option=option)
- else:
- option = None
- write_result = document.update(field_updates)
-
- # Verify the response and the mocks.
- self.assertIs(write_result, mock.sentinel.write_result)
- update_values = {
- "hello": field_updates["hello"],
- "then": {"do": field_updates["then.do"]},
- }
- field_paths = list(field_updates.keys())
- write_pb = self._write_pb_for_update(
- document._document_path, update_values, sorted(field_paths)
- )
- if option is not None:
- option.modify_write(write_pb)
- firestore_api.commit.assert_called_once_with(
- client._database_string,
- [write_pb],
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_update_with_exists(self):
- with self.assertRaises(ValueError):
- self._update_helper(exists=True)
-
- def test_update(self):
- self._update_helper()
-
- def test_update_with_precondition(self):
- from google.protobuf import timestamp_pb2
-
- timestamp = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244)
- self._update_helper(last_update_time=timestamp)
-
- def test_empty_update(self):
- # Create a minimal fake GAPIC with a dummy response.
- firestore_api = mock.Mock(spec=["commit"])
- firestore_api.commit.return_value = self._make_commit_repsonse()
-
- # Attach the fake GAPIC to a real client.
- client = _make_client("potato-chip")
- client._firestore_api_internal = firestore_api
-
- # Actually make a document and call create().
- document = self._make_one("baked", "Alaska", client=client)
- # "Cheat" and use OrderedDict-s so that iteritems() is deterministic.
- field_updates = {}
- with self.assertRaises(ValueError):
- document.update(field_updates)
-
- def _delete_helper(self, **option_kwargs):
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- # Create a minimal fake GAPIC with a dummy response.
- firestore_api = mock.Mock(spec=["commit"])
- firestore_api.commit.return_value = self._make_commit_repsonse()
-
- # Attach the fake GAPIC to a real client.
- client = _make_client("donut-base")
- client._firestore_api_internal = firestore_api
-
- # Actually make a document and call delete().
- document = self._make_one("where", "we-are", client=client)
- if option_kwargs:
- option = client.write_option(**option_kwargs)
- delete_time = document.delete(option=option)
- else:
- option = None
- delete_time = document.delete()
-
- # Verify the response and the mocks.
- self.assertIs(delete_time, mock.sentinel.commit_time)
- write_pb = write_pb2.Write(delete=document._document_path)
- if option is not None:
- option.modify_write(write_pb)
- firestore_api.commit.assert_called_once_with(
- client._database_string,
- [write_pb],
- transaction=None,
- metadata=client._rpc_metadata,
- )
-
- def test_delete(self):
- self._delete_helper()
-
- def test_delete_with_option(self):
- from google.protobuf import timestamp_pb2
-
- timestamp_pb = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244)
- self._delete_helper(last_update_time=timestamp_pb)
-
- def _get_helper(self, field_paths=None, use_transaction=False, not_found=False):
- from google.api_core.exceptions import NotFound
- from google.cloud.firestore_v1beta1.proto import common_pb2
- from google.cloud.firestore_v1beta1.proto import document_pb2
- from google.cloud.firestore_v1beta1.transaction import Transaction
-
- # Create a minimal fake GAPIC with a dummy response.
- create_time = 123
- update_time = 234
- firestore_api = mock.Mock(spec=["get_document"])
- response = mock.create_autospec(document_pb2.Document)
- response.fields = {}
- response.create_time = create_time
- response.update_time = update_time
-
- if not_found:
- firestore_api.get_document.side_effect = NotFound("testing")
- else:
- firestore_api.get_document.return_value = response
-
- client = _make_client("donut-base")
- client._firestore_api_internal = firestore_api
-
- document = self._make_one("where", "we-are", client=client)
-
- if use_transaction:
- transaction = Transaction(client)
- transaction_id = transaction._id = b"asking-me-2"
- else:
- transaction = None
-
- snapshot = document.get(field_paths=field_paths, transaction=transaction)
-
- self.assertIs(snapshot.reference, document)
- if not_found:
- self.assertIsNone(snapshot._data)
- self.assertFalse(snapshot.exists)
- self.assertIsNone(snapshot.read_time)
- self.assertIsNone(snapshot.create_time)
- self.assertIsNone(snapshot.update_time)
- else:
- self.assertEqual(snapshot.to_dict(), {})
- self.assertTrue(snapshot.exists)
- self.assertIsNone(snapshot.read_time)
- self.assertIs(snapshot.create_time, create_time)
- self.assertIs(snapshot.update_time, update_time)
-
- # Verify the request made to the API
- if field_paths is not None:
- mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
- else:
- mask = None
-
- if use_transaction:
- expected_transaction_id = transaction_id
- else:
- expected_transaction_id = None
-
- firestore_api.get_document.assert_called_once_with(
- document._document_path,
- mask=mask,
- transaction=expected_transaction_id,
- metadata=client._rpc_metadata,
- )
-
- def test_get_not_found(self):
- self._get_helper(not_found=True)
-
- def test_get_default(self):
- self._get_helper()
-
- def test_get_w_string_field_path(self):
- with self.assertRaises(ValueError):
- self._get_helper(field_paths="foo")
-
- def test_get_with_field_path(self):
- self._get_helper(field_paths=["foo"])
-
- def test_get_with_multiple_field_paths(self):
- self._get_helper(field_paths=["foo", "bar.baz"])
-
- def test_get_with_transaction(self):
- self._get_helper(use_transaction=True)
-
- def _collections_helper(self, page_size=None):
- from google.api_core.page_iterator import Iterator
- from google.api_core.page_iterator import Page
- from google.cloud.firestore_v1beta1.collection import CollectionReference
- from google.cloud.firestore_v1beta1.gapic.firestore_client import (
- FirestoreClient,
- )
-
- class _Iterator(Iterator):
- def __init__(self, pages):
- super(_Iterator, self).__init__(client=None)
- self._pages = pages
-
- def _next_page(self):
- if self._pages:
- page, self._pages = self._pages[0], self._pages[1:]
- return Page(self, page, self.item_to_value)
-
- collection_ids = ["coll-1", "coll-2"]
- iterator = _Iterator(pages=[collection_ids])
- api_client = mock.create_autospec(FirestoreClient)
- api_client.list_collection_ids.return_value = iterator
-
- client = _make_client()
- client._firestore_api_internal = api_client
-
- # Actually make a document and call delete().
- document = self._make_one("where", "we-are", client=client)
- if page_size is not None:
- collections = list(document.collections(page_size=page_size))
- else:
- collections = list(document.collections())
-
- # Verify the response and the mocks.
- self.assertEqual(len(collections), len(collection_ids))
- for collection, collection_id in zip(collections, collection_ids):
- self.assertIsInstance(collection, CollectionReference)
- self.assertEqual(collection.parent, document)
- self.assertEqual(collection.id, collection_id)
-
- api_client.list_collection_ids.assert_called_once_with(
- document._document_path, page_size=page_size, metadata=client._rpc_metadata
- )
-
- def test_collections_wo_page_size(self):
- self._collections_helper()
-
- def test_collections_w_page_size(self):
- self._collections_helper(page_size=10)
-
- @mock.patch("google.cloud.firestore_v1beta1.document.Watch", autospec=True)
- def test_on_snapshot(self, watch):
- client = mock.Mock(_database_string="sprinklez", spec=["_database_string"])
- document = self._make_one("yellow", "mellow", client=client)
- document.on_snapshot(None)
- watch.for_document.assert_called_once()
-
-
-class TestDocumentSnapshot(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1.document import DocumentSnapshot
-
- return DocumentSnapshot
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- def _make_reference(self, *args, **kwargs):
- from google.cloud.firestore_v1beta1.document import DocumentReference
-
- return DocumentReference(*args, **kwargs)
-
- def _make_w_ref(self, ref_path=("a", "b"), data={}, exists=True):
- client = mock.sentinel.client
- reference = self._make_reference(*ref_path, client=client)
- return self._make_one(
- reference,
- data,
- exists,
- mock.sentinel.read_time,
- mock.sentinel.create_time,
- mock.sentinel.update_time,
- )
-
- def test_constructor(self):
- client = mock.sentinel.client
- reference = self._make_reference("hi", "bye", client=client)
- data = {"zoop": 83}
- snapshot = self._make_one(
- reference,
- data,
- True,
- mock.sentinel.read_time,
- mock.sentinel.create_time,
- mock.sentinel.update_time,
- )
- self.assertIs(snapshot._reference, reference)
- self.assertEqual(snapshot._data, data)
- self.assertIsNot(snapshot._data, data) # Make sure copied.
- self.assertTrue(snapshot._exists)
- self.assertIs(snapshot.read_time, mock.sentinel.read_time)
- self.assertIs(snapshot.create_time, mock.sentinel.create_time)
- self.assertIs(snapshot.update_time, mock.sentinel.update_time)
-
- def test___eq___other_type(self):
- snapshot = self._make_w_ref()
- other = object()
- self.assertFalse(snapshot == other)
-
- def test___eq___different_reference_same_data(self):
- snapshot = self._make_w_ref(("a", "b"))
- other = self._make_w_ref(("c", "d"))
- self.assertFalse(snapshot == other)
-
- def test___eq___same_reference_different_data(self):
- snapshot = self._make_w_ref(("a", "b"))
- other = self._make_w_ref(("a", "b"), {"foo": "bar"})
- self.assertFalse(snapshot == other)
-
- def test___eq___same_reference_same_data(self):
- snapshot = self._make_w_ref(("a", "b"), {"foo": "bar"})
- other = self._make_w_ref(("a", "b"), {"foo": "bar"})
- self.assertTrue(snapshot == other)
-
- def test___hash__(self):
- from google.protobuf import timestamp_pb2
-
- client = mock.MagicMock()
- client.__hash__.return_value = 234566789
- reference = self._make_reference("hi", "bye", client=client)
- data = {"zoop": 83}
- update_time = timestamp_pb2.Timestamp(seconds=123456, nanos=123456789)
- snapshot = self._make_one(
- reference, data, True, None, mock.sentinel.create_time, update_time
- )
- self.assertEqual(
- hash(snapshot), hash(reference) + hash(123456) + hash(123456789)
- )
-
- def test__client_property(self):
- reference = self._make_reference(
- "ok", "fine", "now", "fore", client=mock.sentinel.client
- )
- snapshot = self._make_one(reference, {}, False, None, None, None)
- self.assertIs(snapshot._client, mock.sentinel.client)
-
- def test_exists_property(self):
- reference = mock.sentinel.reference
-
- snapshot1 = self._make_one(reference, {}, False, None, None, None)
- self.assertFalse(snapshot1.exists)
- snapshot2 = self._make_one(reference, {}, True, None, None, None)
- self.assertTrue(snapshot2.exists)
-
- def test_id_property(self):
- document_id = "around"
- reference = self._make_reference(
- "look", document_id, client=mock.sentinel.client
- )
- snapshot = self._make_one(reference, {}, True, None, None, None)
- self.assertEqual(snapshot.id, document_id)
- self.assertEqual(reference.id, document_id)
-
- def test_reference_property(self):
- snapshot = self._make_one(mock.sentinel.reference, {}, True, None, None, None)
- self.assertIs(snapshot.reference, mock.sentinel.reference)
-
- def test_get(self):
- data = {"one": {"bold": "move"}}
- snapshot = self._make_one(None, data, True, None, None, None)
-
- first_read = snapshot.get("one")
- second_read = snapshot.get("one")
- self.assertEqual(first_read, data.get("one"))
- self.assertIsNot(first_read, data.get("one"))
- self.assertEqual(first_read, second_read)
- self.assertIsNot(first_read, second_read)
-
- with self.assertRaises(KeyError):
- snapshot.get("two")
-
- def test_nonexistent_snapshot(self):
- snapshot = self._make_one(None, None, False, None, None, None)
- self.assertIsNone(snapshot.get("one"))
-
- def test_to_dict(self):
- data = {"a": 10, "b": ["definitely", "mutable"], "c": {"45": 50}}
- snapshot = self._make_one(None, data, True, None, None, None)
- as_dict = snapshot.to_dict()
- self.assertEqual(as_dict, data)
- self.assertIsNot(as_dict, data)
- # Check that the data remains unchanged.
- as_dict["b"].append("hi")
- self.assertEqual(data, snapshot.to_dict())
- self.assertNotEqual(data, as_dict)
-
- def test_non_existent(self):
- snapshot = self._make_one(None, None, False, None, None, None)
- as_dict = snapshot.to_dict()
- self.assertIsNone(as_dict)
-
-
-class Test__get_document_path(unittest.TestCase):
- @staticmethod
- def _call_fut(client, path):
- from google.cloud.firestore_v1beta1.document import _get_document_path
-
- return _get_document_path(client, path)
-
- def test_it(self):
- project = "prah-jekt"
- client = _make_client(project=project)
- path = ("Some", "Document", "Child", "Shockument")
- document_path = self._call_fut(client, path)
-
- expected = "projects/{}/databases/{}/documents/{}".format(
- project, client._database, "/".join(path)
- )
- self.assertEqual(document_path, expected)
-
-
-class Test__consume_single_get(unittest.TestCase):
- @staticmethod
- def _call_fut(response_iterator):
- from google.cloud.firestore_v1beta1.document import _consume_single_get
-
- return _consume_single_get(response_iterator)
-
- def test_success(self):
- response_iterator = iter([mock.sentinel.result])
- result = self._call_fut(response_iterator)
- self.assertIs(result, mock.sentinel.result)
-
- def test_failure_not_enough(self):
- response_iterator = iter([])
- with self.assertRaises(ValueError):
- self._call_fut(response_iterator)
-
- def test_failure_too_many(self):
- response_iterator = iter([None, None])
- with self.assertRaises(ValueError):
- self._call_fut(response_iterator)
-
-
-class Test__first_write_result(unittest.TestCase):
- @staticmethod
- def _call_fut(write_results):
- from google.cloud.firestore_v1beta1.document import _first_write_result
-
- return _first_write_result(write_results)
-
- def test_success(self):
- from google.protobuf import timestamp_pb2
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- single_result = write_pb2.WriteResult(
- update_time=timestamp_pb2.Timestamp(seconds=1368767504, nanos=458000123)
- )
- write_results = [single_result]
- result = self._call_fut(write_results)
- self.assertIs(result, single_result)
-
- def test_failure_not_enough(self):
- write_results = []
- with self.assertRaises(ValueError):
- self._call_fut(write_results)
-
- def test_more_than_one(self):
- from google.cloud.firestore_v1beta1.proto import write_pb2
-
- result1 = write_pb2.WriteResult()
- result2 = write_pb2.WriteResult()
- write_results = [result1, result2]
- result = self._call_fut(write_results)
- self.assertIs(result, result1)
-
-
-def _make_credentials():
- import google.auth.credentials
-
- return mock.Mock(spec=google.auth.credentials.Credentials)
-
-
-def _make_client(project="project-project"):
- from google.cloud.firestore_v1beta1.client import Client
-
- credentials = _make_credentials()
-
- with pytest.deprecated_call():
- return Client(project=project, credentials=credentials)
diff --git a/tests/unit/v1beta1/test_field_path.py b/tests/unit/v1beta1/test_field_path.py
deleted file mode 100644
index 22f314e612..0000000000
--- a/tests/unit/v1beta1/test_field_path.py
+++ /dev/null
@@ -1,495 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-
-class Test__tokenize_field_path(unittest.TestCase):
- @staticmethod
- def _call_fut(path):
- from google.cloud.firestore_v1beta1 import field_path
-
- return field_path._tokenize_field_path(path)
-
- def _expect(self, path, split_path):
- self.assertEqual(list(self._call_fut(path)), split_path)
-
- def test_w_empty(self):
- self._expect("", [])
-
- def test_w_single_dot(self):
- self._expect(".", ["."])
-
- def test_w_single_simple(self):
- self._expect("abc", ["abc"])
-
- def test_w_single_quoted(self):
- self._expect("`c*de`", ["`c*de`"])
-
- def test_w_quoted_embedded_dot(self):
- self._expect("`c*.de`", ["`c*.de`"])
-
- def test_w_quoted_escaped_backtick(self):
- self._expect(r"`c*\`de`", [r"`c*\`de`"])
-
- def test_w_dotted_quoted(self):
- self._expect("`*`.`~`", ["`*`", ".", "`~`"])
-
- def test_w_dotted(self):
- self._expect("a.b.`c*de`", ["a", ".", "b", ".", "`c*de`"])
-
- def test_w_dotted_escaped(self):
- self._expect("_0.`1`.`+2`", ["_0", ".", "`1`", ".", "`+2`"])
-
- def test_w_unconsumed_characters(self):
- path = "a~b"
- with self.assertRaises(ValueError):
- list(self._call_fut(path))
-
-
-class Test_split_field_path(unittest.TestCase):
- @staticmethod
- def _call_fut(path):
- from google.cloud.firestore_v1beta1 import field_path
-
- return field_path.split_field_path(path)
-
- def test_w_single_dot(self):
- with self.assertRaises(ValueError):
- self._call_fut(".")
-
- def test_w_leading_dot(self):
- with self.assertRaises(ValueError):
- self._call_fut(".a.b.c")
-
- def test_w_trailing_dot(self):
- with self.assertRaises(ValueError):
- self._call_fut("a.b.")
-
- def test_w_missing_dot(self):
- with self.assertRaises(ValueError):
- self._call_fut("a`c*de`f")
-
- def test_w_half_quoted_field(self):
- with self.assertRaises(ValueError):
- self._call_fut("`c*de")
-
- def test_w_empty(self):
- self.assertEqual(self._call_fut(""), [])
-
- def test_w_simple_field(self):
- self.assertEqual(self._call_fut("a"), ["a"])
-
- def test_w_dotted_field(self):
- self.assertEqual(self._call_fut("a.b.cde"), ["a", "b", "cde"])
-
- def test_w_quoted_field(self):
- self.assertEqual(self._call_fut("a.b.`c*de`"), ["a", "b", "`c*de`"])
-
- def test_w_quoted_field_escaped_backtick(self):
- self.assertEqual(self._call_fut(r"`c*\`de`"), [r"`c*\`de`"])
-
-
-class Test_parse_field_path(unittest.TestCase):
- @staticmethod
- def _call_fut(path):
- from google.cloud.firestore_v1beta1 import field_path
-
- return field_path.parse_field_path(path)
-
- def test_wo_escaped_names(self):
- self.assertEqual(self._call_fut("a.b.c"), ["a", "b", "c"])
-
- def test_w_escaped_backtick(self):
- self.assertEqual(self._call_fut("`a\\`b`.c.d"), ["a`b", "c", "d"])
-
- def test_w_escaped_backslash(self):
- self.assertEqual(self._call_fut("`a\\\\b`.c.d"), ["a\\b", "c", "d"])
-
- def test_w_first_name_escaped_wo_closing_backtick(self):
- with self.assertRaises(ValueError):
- self._call_fut("`a\\`b.c.d")
-
-
-class Test_render_field_path(unittest.TestCase):
- @staticmethod
- def _call_fut(field_names):
- from google.cloud.firestore_v1beta1 import field_path
-
- return field_path.render_field_path(field_names)
-
- def test_w_empty(self):
- self.assertEqual(self._call_fut([]), "")
-
- def test_w_one_simple(self):
- self.assertEqual(self._call_fut(["a"]), "a")
-
- def test_w_one_starts_w_digit(self):
- self.assertEqual(self._call_fut(["0abc"]), "`0abc`")
-
- def test_w_one_w_non_alphanum(self):
- self.assertEqual(self._call_fut(["a b c"]), "`a b c`")
-
- def test_w_one_w_backtick(self):
- self.assertEqual(self._call_fut(["a`b"]), "`a\\`b`")
-
- def test_w_one_w_backslash(self):
- self.assertEqual(self._call_fut(["a\\b"]), "`a\\\\b`")
-
- def test_multiple(self):
- self.assertEqual(self._call_fut(["a", "b", "c"]), "a.b.c")
-
-
-class Test_get_nested_value(unittest.TestCase):
-
- DATA = {
- "top1": {"middle2": {"bottom3": 20, "bottom4": 22}, "middle5": True},
- "top6": b"\x00\x01 foo",
- }
-
- @staticmethod
- def _call_fut(path, data):
- from google.cloud.firestore_v1beta1 import field_path
-
- return field_path.get_nested_value(path, data)
-
- def test_simple(self):
- self.assertIs(self._call_fut("top1", self.DATA), self.DATA["top1"])
-
- def test_nested(self):
- self.assertIs(
- self._call_fut("top1.middle2", self.DATA), self.DATA["top1"]["middle2"]
- )
- self.assertIs(
- self._call_fut("top1.middle2.bottom3", self.DATA),
- self.DATA["top1"]["middle2"]["bottom3"],
- )
-
- def test_missing_top_level(self):
- from google.cloud.firestore_v1beta1.field_path import _FIELD_PATH_MISSING_TOP
-
- field_path = "top8"
- with self.assertRaises(KeyError) as exc_info:
- self._call_fut(field_path, self.DATA)
-
- err_msg = _FIELD_PATH_MISSING_TOP.format(field_path)
- self.assertEqual(exc_info.exception.args, (err_msg,))
-
- def test_missing_key(self):
- from google.cloud.firestore_v1beta1.field_path import _FIELD_PATH_MISSING_KEY
-
- with self.assertRaises(KeyError) as exc_info:
- self._call_fut("top1.middle2.nope", self.DATA)
-
- err_msg = _FIELD_PATH_MISSING_KEY.format("nope", "top1.middle2")
- self.assertEqual(exc_info.exception.args, (err_msg,))
-
- def test_bad_type(self):
- from google.cloud.firestore_v1beta1.field_path import _FIELD_PATH_WRONG_TYPE
-
- with self.assertRaises(KeyError) as exc_info:
- self._call_fut("top6.middle7", self.DATA)
-
- err_msg = _FIELD_PATH_WRONG_TYPE.format("top6", "middle7")
- self.assertEqual(exc_info.exception.args, (err_msg,))
-
-
-class TestFieldPath(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1 import field_path
-
- return field_path.FieldPath
-
- def _make_one(self, *args):
- klass = self._get_target_class()
- return klass(*args)
-
- def test_ctor_w_none_in_part(self):
- with self.assertRaises(ValueError):
- self._make_one("a", None, "b")
-
- def test_ctor_w_empty_string_in_part(self):
- with self.assertRaises(ValueError):
- self._make_one("a", "", "b")
-
- def test_ctor_w_integer_part(self):
- with self.assertRaises(ValueError):
- self._make_one("a", 3, "b")
-
- def test_ctor_w_list(self):
- parts = ["a", "b", "c"]
- with self.assertRaises(ValueError):
- self._make_one(parts)
-
- def test_ctor_w_tuple(self):
- parts = ("a", "b", "c")
- with self.assertRaises(ValueError):
- self._make_one(parts)
-
- def test_ctor_w_iterable_part(self):
- with self.assertRaises(ValueError):
- self._make_one("a", ["a"], "b")
-
- def test_constructor_w_single_part(self):
- field_path = self._make_one("a")
- self.assertEqual(field_path.parts, ("a",))
-
- def test_constructor_w_multiple_parts(self):
- field_path = self._make_one("a", "b", "c")
- self.assertEqual(field_path.parts, ("a", "b", "c"))
-
- def test_ctor_w_invalid_chars_in_part(self):
- invalid_parts = ("~", "*", "/", "[", "]", ".")
- for invalid_part in invalid_parts:
- field_path = self._make_one(invalid_part)
- self.assertEqual(field_path.parts, (invalid_part,))
-
- def test_ctor_w_double_dots(self):
- field_path = self._make_one("a..b")
- self.assertEqual(field_path.parts, ("a..b",))
-
- def test_ctor_w_unicode(self):
- field_path = self._make_one("一", "二", "三")
- self.assertEqual(field_path.parts, ("一", "二", "三"))
-
- def test_from_api_repr_w_empty_string(self):
- api_repr = ""
- with self.assertRaises(ValueError):
- self._get_target_class().from_api_repr(api_repr)
-
- def test_from_api_repr_w_empty_field_name(self):
- api_repr = "a..b"
- with self.assertRaises(ValueError):
- self._get_target_class().from_api_repr(api_repr)
-
- def test_from_api_repr_w_invalid_chars(self):
- invalid_parts = ("~", "*", "/", "[", "]", ".")
- for invalid_part in invalid_parts:
- with self.assertRaises(ValueError):
- self._get_target_class().from_api_repr(invalid_part)
-
- def test_from_api_repr_w_ascii_single(self):
- api_repr = "a"
- field_path = self._get_target_class().from_api_repr(api_repr)
- self.assertEqual(field_path.parts, ("a",))
-
- def test_from_api_repr_w_ascii_dotted(self):
- api_repr = "a.b.c"
- field_path = self._get_target_class().from_api_repr(api_repr)
- self.assertEqual(field_path.parts, ("a", "b", "c"))
-
- def test_from_api_repr_w_non_ascii_dotted_non_quoted(self):
- api_repr = "a.一"
- with self.assertRaises(ValueError):
- self._get_target_class().from_api_repr(api_repr)
-
- def test_from_api_repr_w_non_ascii_dotted_quoted(self):
- api_repr = "a.`一`"
- field_path = self._get_target_class().from_api_repr(api_repr)
- self.assertEqual(field_path.parts, ("a", "一"))
-
- def test_from_string_w_empty_string(self):
- path_string = ""
- with self.assertRaises(ValueError):
- self._get_target_class().from_string(path_string)
-
- def test_from_string_w_empty_field_name(self):
- path_string = "a..b"
- with self.assertRaises(ValueError):
- self._get_target_class().from_string(path_string)
-
- def test_from_string_w_leading_dot(self):
- path_string = ".b.c"
- with self.assertRaises(ValueError):
- self._get_target_class().from_string(path_string)
-
- def test_from_string_w_trailing_dot(self):
- path_string = "a.b."
- with self.assertRaises(ValueError):
- self._get_target_class().from_string(path_string)
-
- def test_from_string_w_leading_invalid_chars(self):
- invalid_paths = ("~", "*", "/", "[", "]")
- for invalid_path in invalid_paths:
- field_path = self._get_target_class().from_string(invalid_path)
- self.assertEqual(field_path.parts, (invalid_path,))
-
- def test_from_string_w_embedded_invalid_chars(self):
- invalid_paths = ("a~b", "x*y", "f/g", "h[j", "k]l")
- for invalid_path in invalid_paths:
- with self.assertRaises(ValueError):
- self._get_target_class().from_string(invalid_path)
-
- def test_from_string_w_ascii_single(self):
- path_string = "a"
- field_path = self._get_target_class().from_string(path_string)
- self.assertEqual(field_path.parts, ("a",))
-
- def test_from_string_w_ascii_dotted(self):
- path_string = "a.b.c"
- field_path = self._get_target_class().from_string(path_string)
- self.assertEqual(field_path.parts, ("a", "b", "c"))
-
- def test_from_string_w_non_ascii_dotted(self):
- path_string = "a.一"
- field_path = self._get_target_class().from_string(path_string)
- self.assertEqual(field_path.parts, ("a", "一"))
-
- def test___hash___w_single_part(self):
- field_path = self._make_one("a")
- self.assertEqual(hash(field_path), hash("a"))
-
- def test___hash___w_multiple_parts(self):
- field_path = self._make_one("a", "b")
- self.assertEqual(hash(field_path), hash("a.b"))
-
- def test___hash___w_escaped_parts(self):
- field_path = self._make_one("a", "3")
- self.assertEqual(hash(field_path), hash("a.`3`"))
-
- def test___eq___w_matching_type(self):
- field_path = self._make_one("a", "b")
- string_path = self._get_target_class().from_string("a.b")
- self.assertEqual(field_path, string_path)
-
- def test___eq___w_non_matching_type(self):
- field_path = self._make_one("a", "c")
- other = mock.Mock()
- other.parts = "a", "b"
- self.assertNotEqual(field_path, other)
-
- def test___lt___w_matching_type(self):
- field_path = self._make_one("a", "b")
- string_path = self._get_target_class().from_string("a.c")
- self.assertTrue(field_path < string_path)
-
- def test___lt___w_non_matching_type(self):
- field_path = self._make_one("a", "b")
- other = object()
- # Python 2 doesn't raise TypeError here, but Python3 does.
- self.assertIs(field_path.__lt__(other), NotImplemented)
-
- def test___add__(self):
- path1 = "a123", "b456"
- path2 = "c789", "d012"
- path3 = "c789.d012"
- field_path1 = self._make_one(*path1)
- field_path1_string = self._make_one(*path1)
- field_path2 = self._make_one(*path2)
- field_path1 += field_path2
- field_path1_string += path3
- field_path2 = field_path2 + self._make_one(*path1)
- self.assertEqual(field_path1, self._make_one(*(path1 + path2)))
- self.assertEqual(field_path2, self._make_one(*(path2 + path1)))
- self.assertEqual(field_path1_string, field_path1)
- self.assertNotEqual(field_path1, field_path2)
- with self.assertRaises(TypeError):
- field_path1 + 305
-
- def test_to_api_repr_a(self):
- parts = "a"
- field_path = self._make_one(parts)
- self.assertEqual(field_path.to_api_repr(), "a")
-
- def test_to_api_repr_backtick(self):
- parts = "`"
- field_path = self._make_one(parts)
- self.assertEqual(field_path.to_api_repr(), r"`\``")
-
- def test_to_api_repr_dot(self):
- parts = "."
- field_path = self._make_one(parts)
- self.assertEqual(field_path.to_api_repr(), "`.`")
-
- def test_to_api_repr_slash(self):
- parts = "\\"
- field_path = self._make_one(parts)
- self.assertEqual(field_path.to_api_repr(), r"`\\`")
-
- def test_to_api_repr_double_slash(self):
- parts = r"\\"
- field_path = self._make_one(parts)
- self.assertEqual(field_path.to_api_repr(), r"`\\\\`")
-
- def test_to_api_repr_underscore(self):
- parts = "_33132"
- field_path = self._make_one(parts)
- self.assertEqual(field_path.to_api_repr(), "_33132")
-
- def test_to_api_repr_unicode_non_simple(self):
- parts = "一"
- field_path = self._make_one(parts)
- self.assertEqual(field_path.to_api_repr(), "`一`")
-
- def test_to_api_repr_number_non_simple(self):
- parts = "03"
- field_path = self._make_one(parts)
- self.assertEqual(field_path.to_api_repr(), "`03`")
-
- def test_to_api_repr_simple_with_dot(self):
- field_path = self._make_one("a.b")
- self.assertEqual(field_path.to_api_repr(), "`a.b`")
-
- def test_to_api_repr_non_simple_with_dot(self):
- parts = "a.一"
- field_path = self._make_one(parts)
- self.assertEqual(field_path.to_api_repr(), "`a.一`")
-
- def test_to_api_repr_simple(self):
- parts = "a0332432"
- field_path = self._make_one(parts)
- self.assertEqual(field_path.to_api_repr(), "a0332432")
-
- def test_to_api_repr_chain(self):
- parts = "a", "`", "\\", "_3", "03", "a03", "\\\\", "a0332432", "一"
- field_path = self._make_one(*parts)
- self.assertEqual(
- field_path.to_api_repr(), r"a.`\``.`\\`._3.`03`.a03.`\\\\`.a0332432.`一`"
- )
-
- def test_eq_or_parent_same(self):
- field_path = self._make_one("a", "b")
- other = self._make_one("a", "b")
- self.assertTrue(field_path.eq_or_parent(other))
-
- def test_eq_or_parent_prefix(self):
- field_path = self._make_one("a", "b")
- other = self._make_one("a", "b", "c")
- self.assertTrue(field_path.eq_or_parent(other))
- self.assertTrue(other.eq_or_parent(field_path))
-
- def test_eq_or_parent_no_prefix(self):
- field_path = self._make_one("a", "b")
- other = self._make_one("d", "e", "f")
- self.assertFalse(field_path.eq_or_parent(other))
- self.assertFalse(other.eq_or_parent(field_path))
-
- def test_lineage_empty(self):
- field_path = self._make_one()
- expected = set()
- self.assertEqual(field_path.lineage(), expected)
-
- def test_lineage_single(self):
- field_path = self._make_one("a")
- expected = set()
- self.assertEqual(field_path.lineage(), expected)
-
- def test_lineage_nested(self):
- field_path = self._make_one("a", "b", "c")
- expected = set([self._make_one("a"), self._make_one("a", "b")])
- self.assertEqual(field_path.lineage(), expected)
diff --git a/tests/unit/v1beta1/test_order.py b/tests/unit/v1beta1/test_order.py
deleted file mode 100644
index f2aabc339e..0000000000
--- a/tests/unit/v1beta1/test_order.py
+++ /dev/null
@@ -1,247 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http:#www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import six
-import unittest
-
-from google.cloud.firestore_v1beta1._helpers import encode_value, GeoPoint
-from google.cloud.firestore_v1beta1.order import Order
-from google.cloud.firestore_v1beta1.order import TypeOrder
-
-from google.cloud.firestore_v1beta1.proto import document_pb2
-
-from google.protobuf import timestamp_pb2
-
-
-class TestOrder(unittest.TestCase):
-
- if six.PY2:
- assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
-
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1.order import Order
-
- return Order
-
- def _make_one(self, *args, **kwargs):
- klass = self._get_target_class()
- return klass(*args, **kwargs)
-
- def test_order(self):
- # Constants used to represent min/max values of storage types.
- int_max_value = 2 ** 31 - 1
- int_min_value = -(2 ** 31)
- float_min_value = 1.175494351 ** -38
- float_nan = float("nan")
- inf = float("inf")
-
- groups = [None] * 65
-
- groups[0] = [nullValue()]
-
- groups[1] = [_boolean_value(False)]
- groups[2] = [_boolean_value(True)]
-
- # numbers
- groups[3] = [_double_value(float_nan), _double_value(float_nan)]
- groups[4] = [_double_value(-inf)]
- groups[5] = [_int_value(int_min_value - 1)]
- groups[6] = [_int_value(int_min_value)]
- groups[7] = [_double_value(-1.1)]
- # Integers and Doubles order the same.
- groups[8] = [_int_value(-1), _double_value(-1.0)]
- groups[9] = [_double_value(-float_min_value)]
- # zeros all compare the same.
- groups[10] = [
- _int_value(0),
- _double_value(-0.0),
- _double_value(0.0),
- _double_value(+0.0),
- ]
- groups[11] = [_double_value(float_min_value)]
- groups[12] = [_int_value(1), _double_value(1.0)]
- groups[13] = [_double_value(1.1)]
- groups[14] = [_int_value(int_max_value)]
- groups[15] = [_int_value(int_max_value + 1)]
- groups[16] = [_double_value(inf)]
-
- groups[17] = [_timestamp_value(123, 0)]
- groups[18] = [_timestamp_value(123, 123)]
- groups[19] = [_timestamp_value(345, 0)]
-
- # strings
- groups[20] = [_string_value("")]
- groups[21] = [_string_value("\u0000\ud7ff\ue000\uffff")]
- groups[22] = [_string_value("(╯°□°)╯︵ ┻━┻")]
- groups[23] = [_string_value("a")]
- groups[24] = [_string_value("abc def")]
- # latin small letter e + combining acute accent + latin small letter b
- groups[25] = [_string_value("e\u0301b")]
- groups[26] = [_string_value("æ")]
- # latin small letter e with acute accent + latin small letter a
- groups[27] = [_string_value("\u00e9a")]
-
- # blobs
- groups[28] = [_blob_value(b"")]
- groups[29] = [_blob_value(b"\x00")]
- groups[30] = [_blob_value(b"\x00\x01\x02\x03\x04")]
- groups[31] = [_blob_value(b"\x00\x01\x02\x04\x03")]
- groups[32] = [_blob_value(b"\x7f")]
-
- # resource names
- groups[33] = [_reference_value("projects/p1/databases/d1/documents/c1/doc1")]
- groups[34] = [_reference_value("projects/p1/databases/d1/documents/c1/doc2")]
- groups[35] = [
- _reference_value("projects/p1/databases/d1/documents/c1/doc2/c2/doc1")
- ]
- groups[36] = [
- _reference_value("projects/p1/databases/d1/documents/c1/doc2/c2/doc2")
- ]
- groups[37] = [_reference_value("projects/p1/databases/d1/documents/c10/doc1")]
- groups[38] = [_reference_value("projects/p1/databases/d1/documents/c2/doc1")]
- groups[39] = [_reference_value("projects/p2/databases/d2/documents/c1/doc1")]
- groups[40] = [_reference_value("projects/p2/databases/d2/documents/c1-/doc1")]
- groups[41] = [_reference_value("projects/p2/databases/d3/documents/c1-/doc1")]
-
- # geo points
- groups[42] = [_geoPoint_value(-90, -180)]
- groups[43] = [_geoPoint_value(-90, 0)]
- groups[44] = [_geoPoint_value(-90, 180)]
- groups[45] = [_geoPoint_value(0, -180)]
- groups[46] = [_geoPoint_value(0, 0)]
- groups[47] = [_geoPoint_value(0, 180)]
- groups[48] = [_geoPoint_value(1, -180)]
- groups[49] = [_geoPoint_value(1, 0)]
- groups[50] = [_geoPoint_value(1, 180)]
- groups[51] = [_geoPoint_value(90, -180)]
- groups[52] = [_geoPoint_value(90, 0)]
- groups[53] = [_geoPoint_value(90, 180)]
-
- # arrays
- groups[54] = [_array_value()]
- groups[55] = [_array_value(["bar"])]
- groups[56] = [_array_value(["foo"])]
- groups[57] = [_array_value(["foo", 0])]
- groups[58] = [_array_value(["foo", 1])]
- groups[59] = [_array_value(["foo", "0"])]
-
- # objects
- groups[60] = [_object_value({"bar": 0})]
- groups[61] = [_object_value({"bar": 0, "foo": 1})]
- groups[62] = [_object_value({"bar": 1})]
- groups[63] = [_object_value({"bar": 2})]
- groups[64] = [_object_value({"bar": "0"})]
-
- target = self._make_one()
-
- for i in range(len(groups)):
- for left in groups[i]:
- for j in range(len(groups)):
- for right in groups[j]:
- expected = Order._compare_to(i, j)
-
- self.assertEqual(
- target.compare(left, right),
- expected,
- "comparing L->R {} ({}) to {} ({})".format(
- i, left, j, right
- ),
- )
-
- expected = Order._compare_to(j, i)
- self.assertEqual(
- target.compare(right, left),
- expected,
- "comparing R->L {} ({}) to {} ({})".format(
- j, right, i, left
- ),
- )
-
- def test_typeorder_type_failure(self):
- target = self._make_one()
- left = mock.Mock()
- left.WhichOneof.return_value = "imaginary-type"
-
- with self.assertRaisesRegex(ValueError, "Could not detect value"):
- target.compare(left, mock.Mock())
-
- def test_failure_to_find_type(self):
- target = self._make_one()
- left = mock.Mock()
- left.WhichOneof.return_value = "imaginary-type"
- right = mock.Mock()
- # Patch from value to get to the deep compare. Since left is a bad type
- # expect this to fail with value error.
- with mock.patch.object(TypeOrder, "from_value") as to:
- to.value = None
- with self.assertRaisesRegex(ValueError, "'Unknown ``value_type``"):
- target.compare(left, right)
-
- def test_compare_objects_different_keys(self):
- left = _object_value({"foo": 0})
- right = _object_value({"bar": 0})
-
- target = self._make_one()
- target.compare(left, right)
-
-
-def _boolean_value(b):
- return encode_value(b)
-
-
-def _double_value(d):
- return encode_value(d)
-
-
-def _int_value(value):
- return encode_value(value)
-
-
-def _string_value(s):
- if not isinstance(s, six.text_type):
- s = six.u(s)
- return encode_value(s)
-
-
-def _reference_value(r):
- return document_pb2.Value(reference_value=r)
-
-
-def _blob_value(b):
- return encode_value(b)
-
-
-def nullValue():
- return encode_value(None)
-
-
-def _timestamp_value(seconds, nanos):
- return document_pb2.Value(
- timestamp_value=timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
- )
-
-
-def _geoPoint_value(latitude, longitude):
- return encode_value(GeoPoint(latitude, longitude))
-
-
-def _array_value(values=[]):
- return encode_value(values)
-
-
-def _object_value(keysAndValues):
- return encode_value(keysAndValues)
diff --git a/tests/unit/v1beta1/test_transforms.py b/tests/unit/v1beta1/test_transforms.py
deleted file mode 100644
index 0f549ae075..0000000000
--- a/tests/unit/v1beta1/test_transforms.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-
-class Test_ValueList(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.firestore_v1beta1.transforms import _ValueList
-
- return _ValueList
-
- def _make_one(self, values):
- return self._get_target_class()(values)
-
- def test_ctor_w_non_list_non_tuple(self):
- invalid_values = (None, u"phred", b"DEADBEEF", 123, {}, object())
- for invalid_value in invalid_values:
- with self.assertRaises(ValueError):
- self._make_one(invalid_value)
-
- def test_ctor_w_empty(self):
- with self.assertRaises(ValueError):
- self._make_one([])
-
- def test_ctor_w_non_empty_list(self):
- values = ["phred", "bharney"]
- inst = self._make_one(values)
- self.assertEqual(inst.values, values)
-
- def test_ctor_w_non_empty_tuple(self):
- values = ("phred", "bharney")
- inst = self._make_one(values)
- self.assertEqual(inst.values, list(values))
-
- def test___eq___other_type(self):
- values = ("phred", "bharney")
- inst = self._make_one(values)
- other = object()
- self.assertFalse(inst == other)
-
- def test___eq___different_values(self):
- values = ("phred", "bharney")
- other_values = ("wylma", "bhetty")
- inst = self._make_one(values)
- other = self._make_one(other_values)
- self.assertFalse(inst == other)
-
- def test___eq___same_values(self):
- values = ("phred", "bharney")
- inst = self._make_one(values)
- other = self._make_one(values)
- self.assertTrue(inst == other)
diff --git a/tests/unit/v1beta1/test_watch.py b/tests/unit/v1beta1/test_watch.py
deleted file mode 100644
index 6d8ba5a040..0000000000
--- a/tests/unit/v1beta1/test_watch.py
+++ /dev/null
@@ -1,832 +0,0 @@
-import datetime
-import unittest
-import mock
-from google.cloud.firestore_v1beta1.proto import firestore_pb2
-
-
-class TestWatchDocTree(unittest.TestCase):
- def _makeOne(self):
- from google.cloud.firestore_v1beta1.watch import WatchDocTree
-
- return WatchDocTree()
-
- def test_insert_and_keys(self):
- inst = self._makeOne()
- inst = inst.insert("b", 1)
- inst = inst.insert("a", 2)
- self.assertEqual(sorted(inst.keys()), ["a", "b"])
-
- def test_remove_and_keys(self):
- inst = self._makeOne()
- inst = inst.insert("b", 1)
- inst = inst.insert("a", 2)
- inst = inst.remove("a")
- self.assertEqual(sorted(inst.keys()), ["b"])
-
- def test_insert_and_find(self):
- inst = self._makeOne()
- inst = inst.insert("b", 1)
- inst = inst.insert("a", 2)
- val = inst.find("a")
- self.assertEqual(val.value, 2)
-
- def test___len__(self):
- inst = self._makeOne()
- inst = inst.insert("b", 1)
- inst = inst.insert("a", 2)
- self.assertEqual(len(inst), 2)
-
- def test___iter__(self):
- inst = self._makeOne()
- inst = inst.insert("b", 1)
- inst = inst.insert("a", 2)
- self.assertEqual(sorted(list(inst)), ["a", "b"])
-
- def test___contains__(self):
- inst = self._makeOne()
- inst = inst.insert("b", 1)
- self.assertTrue("b" in inst)
- self.assertFalse("a" in inst)
-
-
-class TestDocumentChange(unittest.TestCase):
- def _makeOne(self, type, document, old_index, new_index):
- from google.cloud.firestore_v1beta1.watch import DocumentChange
-
- return DocumentChange(type, document, old_index, new_index)
-
- def test_ctor(self):
- inst = self._makeOne("type", "document", "old_index", "new_index")
- self.assertEqual(inst.type, "type")
- self.assertEqual(inst.document, "document")
- self.assertEqual(inst.old_index, "old_index")
- self.assertEqual(inst.new_index, "new_index")
-
-
-class TestWatchResult(unittest.TestCase):
- def _makeOne(self, snapshot, name, change_type):
- from google.cloud.firestore_v1beta1.watch import WatchResult
-
- return WatchResult(snapshot, name, change_type)
-
- def test_ctor(self):
- inst = self._makeOne("snapshot", "name", "change_type")
- self.assertEqual(inst.snapshot, "snapshot")
- self.assertEqual(inst.name, "name")
- self.assertEqual(inst.change_type, "change_type")
-
-
-class Test_maybe_wrap_exception(unittest.TestCase):
- def _callFUT(self, exc):
- from google.cloud.firestore_v1beta1.watch import _maybe_wrap_exception
-
- return _maybe_wrap_exception(exc)
-
- def test_is_grpc_error(self):
- import grpc
- from google.api_core.exceptions import GoogleAPICallError
-
- exc = grpc.RpcError()
- result = self._callFUT(exc)
- self.assertEqual(result.__class__, GoogleAPICallError)
-
- def test_is_not_grpc_error(self):
- exc = ValueError()
- result = self._callFUT(exc)
- self.assertEqual(result.__class__, ValueError)
-
-
-class Test_document_watch_comparator(unittest.TestCase):
- def _callFUT(self, doc1, doc2):
- from google.cloud.firestore_v1beta1.watch import document_watch_comparator
-
- return document_watch_comparator(doc1, doc2)
-
- def test_same_doc(self):
- result = self._callFUT(1, 1)
- self.assertEqual(result, 0)
-
- def test_diff_doc(self):
- self.assertRaises(AssertionError, self._callFUT, 1, 2)
-
-
-class TestWatch(unittest.TestCase):
- def _makeOne(
- self,
- document_reference=None,
- firestore=None,
- target=None,
- comparator=None,
- snapshot_callback=None,
- snapshot_class=None,
- reference_class=None,
- ): # pragma: NO COVER
- from google.cloud.firestore_v1beta1.watch import Watch
-
- if document_reference is None:
- document_reference = DummyDocumentReference()
- if firestore is None:
- firestore = DummyFirestore()
- if target is None:
- WATCH_TARGET_ID = 0x5079 # "Py"
- target = {"documents": {"documents": ["/"]}, "target_id": WATCH_TARGET_ID}
- if comparator is None:
- comparator = self._document_watch_comparator
- if snapshot_callback is None:
- snapshot_callback = self._snapshot_callback
- if snapshot_class is None:
- snapshot_class = DummyDocumentSnapshot
- if reference_class is None:
- reference_class = DummyDocumentReference
- inst = Watch(
- document_reference,
- firestore,
- target,
- comparator,
- snapshot_callback,
- snapshot_class,
- reference_class,
- BackgroundConsumer=DummyBackgroundConsumer,
- ResumableBidiRpc=DummyRpc,
- )
- return inst
-
- def setUp(self):
- self.snapshotted = None
-
- def _document_watch_comparator(self, doc1, doc2): # pragma: NO COVER
- return 0
-
- def _snapshot_callback(self, docs, changes, read_time):
- self.snapshotted = (docs, changes, read_time)
-
- def test_ctor(self):
- inst = self._makeOne()
- self.assertTrue(inst._consumer.started)
- self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done])
-
- def test__on_rpc_done(self):
- inst = self._makeOne()
- threading = DummyThreading()
- with mock.patch("google.cloud.firestore_v1beta1.watch.threading", threading):
- inst._on_rpc_done(True)
- from google.cloud.firestore_v1beta1.watch import _RPC_ERROR_THREAD_NAME
-
- self.assertTrue(threading.threads[_RPC_ERROR_THREAD_NAME].started)
-
- def test_close(self):
- inst = self._makeOne()
- inst.close()
- self.assertEqual(inst._consumer, None)
- self.assertEqual(inst._rpc, None)
- self.assertTrue(inst._closed)
-
- def test_close_already_closed(self):
- inst = self._makeOne()
- inst._closed = True
- old_consumer = inst._consumer
- inst.close()
- self.assertEqual(inst._consumer, old_consumer)
-
- def test_close_inactive(self):
- inst = self._makeOne()
- old_consumer = inst._consumer
- old_consumer.is_active = False
- inst.close()
- self.assertEqual(old_consumer.stopped, False)
-
- def test_unsubscribe(self):
- inst = self._makeOne()
- inst.unsubscribe()
- self.assertTrue(inst._rpc is None)
-
- def test_for_document(self):
- from google.cloud.firestore_v1beta1.watch import Watch
-
- docref = DummyDocumentReference()
- snapshot_callback = self._snapshot_callback
- snapshot_class_instance = DummyDocumentSnapshot
- document_reference_class_instance = DummyDocumentReference
- modulename = "google.cloud.firestore_v1beta1.watch"
- with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc):
- with mock.patch(
- "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer
- ):
- inst = Watch.for_document(
- docref,
- snapshot_callback,
- snapshot_class_instance,
- document_reference_class_instance,
- )
- self.assertTrue(inst._consumer.started)
- self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done])
-
- def test_for_query(self):
- from google.cloud.firestore_v1beta1.watch import Watch
-
- snapshot_callback = self._snapshot_callback
- snapshot_class_instance = DummyDocumentSnapshot
- document_reference_class_instance = DummyDocumentReference
- modulename = "google.cloud.firestore_v1beta1.watch"
- pb2 = DummyPb2()
- with mock.patch("%s.firestore_pb2" % modulename, pb2):
- with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc):
- with mock.patch(
- "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer
- ):
- query = DummyQuery()
- inst = Watch.for_query(
- query,
- snapshot_callback,
- snapshot_class_instance,
- document_reference_class_instance,
- )
- self.assertTrue(inst._consumer.started)
- self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done])
- self.assertEqual(inst._targets["query"], "dummy query target")
-
- def test_on_snapshot_target_no_change_no_target_ids_not_current(self):
- inst = self._makeOne()
- proto = DummyProto()
- inst.on_snapshot(proto) # nothing to assert, no mutations, no rtnval
-
- def test_on_snapshot_target_no_change_no_target_ids_current(self):
- inst = self._makeOne()
- proto = DummyProto()
- proto.target_change.read_time = 1
- inst.current = True
-
- def push(read_time, next_resume_token):
- inst._read_time = read_time
- inst._next_resume_token = next_resume_token
-
- inst.push = push
- inst.on_snapshot(proto)
- self.assertEqual(inst._read_time, 1)
- self.assertEqual(inst._next_resume_token, None)
-
- def test_on_snapshot_target_add(self):
- inst = self._makeOne()
- proto = DummyProto()
- proto.target_change.target_change_type = firestore_pb2.TargetChange.ADD
- proto.target_change.target_ids = [1] # not "Py"
- with self.assertRaises(Exception) as exc:
- inst.on_snapshot(proto)
- self.assertEqual(str(exc.exception), "Unexpected target ID 1 sent by server")
-
- def test_on_snapshot_target_remove(self):
- inst = self._makeOne()
- proto = DummyProto()
- target_change = proto.target_change
- target_change.target_change_type = firestore_pb2.TargetChange.REMOVE
- with self.assertRaises(Exception) as exc:
- inst.on_snapshot(proto)
- self.assertEqual(str(exc.exception), "Error 1: hi")
-
- def test_on_snapshot_target_remove_nocause(self):
- inst = self._makeOne()
- proto = DummyProto()
- target_change = proto.target_change
- target_change.cause = None
- target_change.target_change_type = firestore_pb2.TargetChange.REMOVE
- with self.assertRaises(Exception) as exc:
- inst.on_snapshot(proto)
- self.assertEqual(str(exc.exception), "Error 13: internal error")
-
- def test_on_snapshot_target_reset(self):
- inst = self._makeOne()
-
- def reset():
- inst._docs_reset = True
-
- inst._reset_docs = reset
- proto = DummyProto()
- target_change = proto.target_change
- target_change.target_change_type = firestore_pb2.TargetChange.RESET
- inst.on_snapshot(proto)
- self.assertTrue(inst._docs_reset)
-
- def test_on_snapshot_target_current(self):
- inst = self._makeOne()
- inst.current = False
- proto = DummyProto()
- target_change = proto.target_change
- target_change.target_change_type = firestore_pb2.TargetChange.CURRENT
- inst.on_snapshot(proto)
- self.assertTrue(inst.current)
-
- def test_on_snapshot_target_unknown(self):
- inst = self._makeOne()
- proto = DummyProto()
- proto.target_change.target_change_type = "unknown"
- with self.assertRaises(Exception) as exc:
- inst.on_snapshot(proto)
- self.assertTrue(inst._consumer is None)
- self.assertTrue(inst._rpc is None)
- self.assertEqual(str(exc.exception), "Unknown target change type: unknown ")
-
- def test_on_snapshot_document_change_removed(self):
- from google.cloud.firestore_v1beta1.watch import WATCH_TARGET_ID, ChangeType
-
- inst = self._makeOne()
- proto = DummyProto()
- proto.target_change = ""
- proto.document_change.removed_target_ids = [WATCH_TARGET_ID]
-
- class DummyDocument:
- name = "fred"
-
- proto.document_change.document = DummyDocument()
- inst.on_snapshot(proto)
- self.assertTrue(inst.change_map["fred"] is ChangeType.REMOVED)
-
- def test_on_snapshot_document_change_changed(self):
- from google.cloud.firestore_v1beta1.watch import WATCH_TARGET_ID
-
- inst = self._makeOne()
-
- proto = DummyProto()
- proto.target_change = ""
- proto.document_change.target_ids = [WATCH_TARGET_ID]
-
- class DummyDocument:
- name = "fred"
- fields = {}
- create_time = None
- update_time = None
-
- proto.document_change.document = DummyDocument()
- inst.on_snapshot(proto)
- self.assertEqual(inst.change_map["fred"].data, {})
-
- def test_on_snapshot_document_change_changed_docname_db_prefix(self):
- # TODO: Verify the current behavior. The change map currently contains
- # the db-prefixed document name and not the bare document name.
- from google.cloud.firestore_v1beta1.watch import WATCH_TARGET_ID
-
- inst = self._makeOne()
-
- proto = DummyProto()
- proto.target_change = ""
- proto.document_change.target_ids = [WATCH_TARGET_ID]
-
- class DummyDocument:
- name = "abc://foo/documents/fred"
- fields = {}
- create_time = None
- update_time = None
-
- proto.document_change.document = DummyDocument()
- inst._firestore._database_string = "abc://foo"
- inst.on_snapshot(proto)
- self.assertEqual(inst.change_map["abc://foo/documents/fred"].data, {})
-
- def test_on_snapshot_document_change_neither_changed_nor_removed(self):
- inst = self._makeOne()
- proto = DummyProto()
- proto.target_change = ""
- proto.document_change.target_ids = []
-
- inst.on_snapshot(proto)
- self.assertTrue(not inst.change_map)
-
- def test_on_snapshot_document_removed(self):
- from google.cloud.firestore_v1beta1.watch import ChangeType
-
- inst = self._makeOne()
- proto = DummyProto()
- proto.target_change = ""
- proto.document_change = ""
-
- class DummyRemove(object):
- document = "fred"
-
- remove = DummyRemove()
- proto.document_remove = remove
- proto.document_delete = ""
- inst.on_snapshot(proto)
- self.assertTrue(inst.change_map["fred"] is ChangeType.REMOVED)
-
- def test_on_snapshot_filter_update(self):
- inst = self._makeOne()
- proto = DummyProto()
- proto.target_change = ""
- proto.document_change = ""
- proto.document_remove = ""
- proto.document_delete = ""
-
- class DummyFilter(object):
- count = 999
-
- proto.filter = DummyFilter()
-
- def reset():
- inst._docs_reset = True
-
- inst._reset_docs = reset
- inst.on_snapshot(proto)
- self.assertTrue(inst._docs_reset)
-
- def test_on_snapshot_filter_update_no_size_change(self):
- inst = self._makeOne()
- proto = DummyProto()
- proto.target_change = ""
- proto.document_change = ""
- proto.document_remove = ""
- proto.document_delete = ""
-
- class DummyFilter(object):
- count = 0
-
- proto.filter = DummyFilter()
- inst._docs_reset = False
-
- inst.on_snapshot(proto)
- self.assertFalse(inst._docs_reset)
-
- def test_on_snapshot_unknown_listen_type(self):
- inst = self._makeOne()
- proto = DummyProto()
- proto.target_change = ""
- proto.document_change = ""
- proto.document_remove = ""
- proto.document_delete = ""
- proto.filter = ""
- with self.assertRaises(Exception) as exc:
- inst.on_snapshot(proto)
- self.assertTrue(
- str(exc.exception).startswith("Unknown listen response type"),
- str(exc.exception),
- )
-
- def test_push_callback_called_no_changes(self):
- import pytz
-
- class DummyReadTime(object):
- seconds = 1534858278
-
- inst = self._makeOne()
- inst.push(DummyReadTime, "token")
- self.assertEqual(
- self.snapshotted,
- ([], [], datetime.datetime.fromtimestamp(DummyReadTime.seconds, pytz.utc)),
- )
- self.assertTrue(inst.has_pushed)
- self.assertEqual(inst.resume_token, "token")
-
- def test_push_already_pushed(self):
- class DummyReadTime(object):
- seconds = 1534858278
-
- inst = self._makeOne()
- inst.has_pushed = True
- inst.push(DummyReadTime, "token")
- self.assertEqual(self.snapshotted, None)
- self.assertTrue(inst.has_pushed)
- self.assertEqual(inst.resume_token, "token")
-
- def test__current_size_empty(self):
- inst = self._makeOne()
- result = inst._current_size()
- self.assertEqual(result, 0)
-
- def test__current_size_docmap_has_one(self):
- inst = self._makeOne()
- inst.doc_map["a"] = 1
- result = inst._current_size()
- self.assertEqual(result, 1)
-
- def test__affects_target_target_id_None(self):
- inst = self._makeOne()
- self.assertTrue(inst._affects_target(None, []))
-
- def test__affects_target_current_id_in_target_ids(self):
- inst = self._makeOne()
- self.assertTrue(inst._affects_target([1], 1))
-
- def test__affects_target_current_id_not_in_target_ids(self):
- inst = self._makeOne()
- self.assertFalse(inst._affects_target([1], 2))
-
- def test__extract_changes_doc_removed(self):
- from google.cloud.firestore_v1beta1.watch import ChangeType
-
- inst = self._makeOne()
- changes = {"name": ChangeType.REMOVED}
- doc_map = {"name": True}
- results = inst._extract_changes(doc_map, changes, None)
- self.assertEqual(results, (["name"], [], []))
-
- def test__extract_changes_doc_removed_docname_not_in_docmap(self):
- from google.cloud.firestore_v1beta1.watch import ChangeType
-
- inst = self._makeOne()
- changes = {"name": ChangeType.REMOVED}
- doc_map = {}
- results = inst._extract_changes(doc_map, changes, None)
- self.assertEqual(results, ([], [], []))
-
- def test__extract_changes_doc_updated(self):
- inst = self._makeOne()
-
- class Dummy(object):
- pass
-
- doc = Dummy()
- snapshot = Dummy()
- changes = {"name": snapshot}
- doc_map = {"name": doc}
- results = inst._extract_changes(doc_map, changes, 1)
- self.assertEqual(results, ([], [], [snapshot]))
- self.assertEqual(snapshot.read_time, 1)
-
- def test__extract_changes_doc_updated_read_time_is_None(self):
- inst = self._makeOne()
-
- class Dummy(object):
- pass
-
- doc = Dummy()
- snapshot = Dummy()
- snapshot.read_time = None
- changes = {"name": snapshot}
- doc_map = {"name": doc}
- results = inst._extract_changes(doc_map, changes, None)
- self.assertEqual(results, ([], [], [snapshot]))
- self.assertEqual(snapshot.read_time, None)
-
- def test__extract_changes_doc_added(self):
- inst = self._makeOne()
-
- class Dummy(object):
- pass
-
- snapshot = Dummy()
- changes = {"name": snapshot}
- doc_map = {}
- results = inst._extract_changes(doc_map, changes, 1)
- self.assertEqual(results, ([], [snapshot], []))
- self.assertEqual(snapshot.read_time, 1)
-
- def test__extract_changes_doc_added_read_time_is_None(self):
- inst = self._makeOne()
-
- class Dummy(object):
- pass
-
- snapshot = Dummy()
- snapshot.read_time = None
- changes = {"name": snapshot}
- doc_map = {}
- results = inst._extract_changes(doc_map, changes, None)
- self.assertEqual(results, ([], [snapshot], []))
- self.assertEqual(snapshot.read_time, None)
-
- def test__compute_snapshot_doctree_and_docmap_disagree_about_length(self):
- inst = self._makeOne()
- doc_tree = {}
- doc_map = {None: None}
- self.assertRaises(
- AssertionError, inst._compute_snapshot, doc_tree, doc_map, None, None, None
- )
-
- def test__compute_snapshot_operation_relative_ordering(self):
- from google.cloud.firestore_v1beta1.watch import WatchDocTree
-
- doc_tree = WatchDocTree()
-
- class DummyDoc(object):
- update_time = mock.sentinel
-
- deleted_doc = DummyDoc()
- added_doc = DummyDoc()
- added_doc._document_path = "/added"
- updated_doc = DummyDoc()
- updated_doc._document_path = "/updated"
- doc_tree = doc_tree.insert(deleted_doc, None)
- doc_tree = doc_tree.insert(updated_doc, None)
- doc_map = {"/deleted": deleted_doc, "/updated": updated_doc}
- added_snapshot = DummyDocumentSnapshot(added_doc, None, True, None, None, None)
- added_snapshot.reference = added_doc
- updated_snapshot = DummyDocumentSnapshot(
- updated_doc, None, True, None, None, None
- )
- updated_snapshot.reference = updated_doc
- delete_changes = ["/deleted"]
- add_changes = [added_snapshot]
- update_changes = [updated_snapshot]
- inst = self._makeOne()
- updated_tree, updated_map, applied_changes = inst._compute_snapshot(
- doc_tree, doc_map, delete_changes, add_changes, update_changes
- )
- # TODO: Verify that the assertion here is correct.
- self.assertEqual(
- updated_map, {"/updated": updated_snapshot, "/added": added_snapshot}
- )
-
- def test__compute_snapshot_modify_docs_updated_doc_no_timechange(self):
- from google.cloud.firestore_v1beta1.watch import WatchDocTree
-
- doc_tree = WatchDocTree()
-
- class DummyDoc(object):
- pass
-
- updated_doc_v1 = DummyDoc()
- updated_doc_v1.update_time = 1
- updated_doc_v1._document_path = "/updated"
- updated_doc_v2 = DummyDoc()
- updated_doc_v2.update_time = 1
- updated_doc_v2._document_path = "/updated"
- doc_tree = doc_tree.insert("/updated", updated_doc_v1)
- doc_map = {"/updated": updated_doc_v1}
- updated_snapshot = DummyDocumentSnapshot(
- updated_doc_v2, None, True, None, None, 1
- )
- delete_changes = []
- add_changes = []
- update_changes = [updated_snapshot]
- inst = self._makeOne()
- updated_tree, updated_map, applied_changes = inst._compute_snapshot(
- doc_tree, doc_map, delete_changes, add_changes, update_changes
- )
- self.assertEqual(updated_map, doc_map) # no change
-
- def test__reset_docs(self):
- from google.cloud.firestore_v1beta1.watch import ChangeType
-
- inst = self._makeOne()
- inst.change_map = {None: None}
- from google.cloud.firestore_v1beta1.watch import WatchDocTree
-
- doc = DummyDocumentReference("doc")
- doc_tree = WatchDocTree()
- snapshot = DummyDocumentSnapshot(doc, None, True, None, None, None)
- snapshot.reference = doc
- doc_tree = doc_tree.insert(snapshot, None)
- inst.doc_tree = doc_tree
- inst._reset_docs()
- self.assertEqual(inst.change_map, {"/doc": ChangeType.REMOVED})
- self.assertEqual(inst.resume_token, None)
- self.assertFalse(inst.current)
-
-
-class DummyFirestoreStub(object):
- def Listen(self): # pragma: NO COVER
- pass
-
-
-class DummyFirestoreClient(object):
- def __init__(self):
- self.transport = mock.Mock(_stubs={"firestore_stub": DummyFirestoreStub()})
-
-
-class DummyDocumentReference(object):
- def __init__(self, *document_path, **kw):
- if "client" not in kw:
- self._client = DummyFirestore()
- else:
- self._client = kw["client"]
-
- self._path = document_path
- self._document_path = "/" + "/".join(document_path)
- self.__dict__.update(kw)
-
-
-class DummyQuery(object): # pragma: NO COVER
- def __init__(self, **kw):
- if "client" not in kw:
- self._client = DummyFirestore()
- else:
- self._client = kw["client"]
-
- if "comparator" not in kw:
- # don't really do the comparison, just return 0 (equal) for all
- self._comparator = lambda x, y: 1
- else:
- self._comparator = kw["comparator"]
-
- def _to_protobuf(self):
- return ""
-
-
-class DummyFirestore(object):
- _firestore_api = DummyFirestoreClient()
- _database_string = "abc://bar/"
- _rpc_metadata = None
-
- def document(self, *document_path): # pragma: NO COVER
- if len(document_path) == 1:
- path = document_path[0].split("/")
- else:
- path = document_path
-
- return DummyDocumentReference(*path, client=self)
-
-
-class DummyDocumentSnapshot(object):
- # def __init__(self, **kw):
- # self.__dict__.update(kw)
- def __init__(self, reference, data, exists, read_time, create_time, update_time):
- self.reference = reference
- self.data = data
- self.exists = exists
- self.read_time = read_time
- self.create_time = create_time
- self.update_time = update_time
-
- def __str__(self):
- return "%s-%s" % (self.reference._document_path, self.read_time)
-
- def __hash__(self):
- return hash(str(self))
-
-
-class DummyBackgroundConsumer(object):
- started = False
- stopped = False
- is_active = True
-
- def __init__(self, rpc, on_snapshot):
- self._rpc = rpc
- self.on_snapshot = on_snapshot
-
- def start(self):
- self.started = True
-
- def stop(self):
- self.stopped = True
- self.is_active = False
-
-
-class DummyThread(object):
- started = False
-
- def __init__(self, name, target, kwargs):
- self.name = name
- self.target = target
- self.kwargs = kwargs
-
- def start(self):
- self.started = True
-
-
-class DummyThreading(object):
- def __init__(self):
- self.threads = {}
-
- def Thread(self, name, target, kwargs):
- thread = DummyThread(name, target, kwargs)
- self.threads[name] = thread
- return thread
-
-
-class DummyRpc(object):
- def __init__(self, listen, initial_request, should_recover, metadata=None):
- self.listen = listen
- self.initial_request = initial_request
- self.should_recover = should_recover
- self.closed = False
- self.callbacks = []
- self._metadata = metadata
-
- def add_done_callback(self, callback):
- self.callbacks.append(callback)
-
- def close(self):
- self.closed = True
-
-
-class DummyCause(object):
- code = 1
- message = "hi"
-
-
-class DummyChange(object):
- def __init__(self):
- self.target_ids = []
- self.removed_target_ids = []
- self.read_time = 0
- self.target_change_type = firestore_pb2.TargetChange.NO_CHANGE
- self.resume_token = None
- self.cause = DummyCause()
-
-
-class DummyProto(object):
- def __init__(self):
- self.target_change = DummyChange()
- self.document_change = DummyChange()
-
-
-class DummyTarget(object):
- def QueryTarget(self, **kw):
- self.kw = kw
- return "dummy query target"
-
-
-class DummyPb2(object):
-
- Target = DummyTarget()
-
- def ListenRequest(self, **kw):
- pass
diff --git a/tests/unit/v1beta1/testdata/create-all-transforms.textproto b/tests/unit/v1beta1/testdata/create-all-transforms.textproto
deleted file mode 100644
index bbdf19e4df..0000000000
--- a/tests/unit/v1beta1/testdata/create-all-transforms.textproto
+++ /dev/null
@@ -1,64 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can be created with any amount of transforms.
-
-description: "create: all transforms in a single call"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": [\"ArrayUnion\", 1, 2, 3], \"d\": [\"ArrayRemove\", 4, 5, 6]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- field_transforms: <
- field_path: "c"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "d"
- remove_all_from_array: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayremove-multi.textproto b/tests/unit/v1beta1/testdata/create-arrayremove-multi.textproto
deleted file mode 100644
index f80d65b238..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayremove-multi.textproto
+++ /dev/null
@@ -1,61 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ArrayRemove field. Since all the ArrayRemove
-# fields are removed, the only field in the update is "a".
-
-description: "create: multiple ArrayRemove fields"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3], \"c\": {\"d\": [\"ArrayRemove\", 4, 5, 6]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "c.d"
- remove_all_from_array: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayremove-nested.textproto b/tests/unit/v1beta1/testdata/create-arrayremove-nested.textproto
deleted file mode 100644
index 97756c306c..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayremove-nested.textproto
+++ /dev/null
@@ -1,48 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An ArrayRemove value can occur at any depth. In this case, the transform applies
-# to the field path "b.c". Since "c" is removed from the update, "b" becomes
-# empty, so it is also removed from the update.
-
-description: "create: nested ArrayRemove field"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayRemove\", 1, 2, 3]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayremove-noarray-nested.textproto b/tests/unit/v1beta1/testdata/create-arrayremove-noarray-nested.textproto
deleted file mode 100644
index 4ec0cb3b93..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayremove-noarray-nested.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ArrayRemove. Firestore transforms don't support array indexing.
-
-description: "create: ArrayRemove cannot be anywhere inside an array value"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayremove-noarray.textproto b/tests/unit/v1beta1/testdata/create-arrayremove-noarray.textproto
deleted file mode 100644
index 969b8d9dd8..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayremove-noarray.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayRemove must be the value of a field. Firestore transforms don't support
-# array indexing.
-
-description: "create: ArrayRemove cannot be in an array value"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayremove-with-st.textproto b/tests/unit/v1beta1/testdata/create-arrayremove-with-st.textproto
deleted file mode 100644
index b6ea3224de..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayremove-with-st.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. It may not appear in
-# an ArrayUnion.
-
-description: "create: The ServerTimestamp sentinel cannot be in an ArrayUnion"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [\"ArrayRemove\", 1, \"ServerTimestamp\", 3]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayremove.textproto b/tests/unit/v1beta1/testdata/create-arrayremove.textproto
deleted file mode 100644
index e8e4bb3980..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayremove.textproto
+++ /dev/null
@@ -1,47 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with ArrayRemove is removed from the data in the update operation. Instead
-# it appears in a separate Transform operation.
-
-description: "create: ArrayRemove with data"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayunion-multi.textproto b/tests/unit/v1beta1/testdata/create-arrayunion-multi.textproto
deleted file mode 100644
index ec3cb72f5b..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayunion-multi.textproto
+++ /dev/null
@@ -1,61 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ArrayUnion field. Since all the ArrayUnion
-# fields are removed, the only field in the update is "a".
-
-description: "create: multiple ArrayUnion fields"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3], \"c\": {\"d\": [\"ArrayUnion\", 4, 5, 6]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "c.d"
- append_missing_elements: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayunion-nested.textproto b/tests/unit/v1beta1/testdata/create-arrayunion-nested.textproto
deleted file mode 100644
index e6e81bc1d7..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayunion-nested.textproto
+++ /dev/null
@@ -1,48 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An ArrayUnion value can occur at any depth. In this case, the transform applies
-# to the field path "b.c". Since "c" is removed from the update, "b" becomes
-# empty, so it is also removed from the update.
-
-description: "create: nested ArrayUnion field"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayUnion\", 1, 2, 3]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayunion-noarray-nested.textproto b/tests/unit/v1beta1/testdata/create-arrayunion-noarray-nested.textproto
deleted file mode 100644
index 4c0afe4430..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayunion-noarray-nested.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ArrayUnion. Firestore transforms don't support array indexing.
-
-description: "create: ArrayUnion cannot be anywhere inside an array value"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayunion-noarray.textproto b/tests/unit/v1beta1/testdata/create-arrayunion-noarray.textproto
deleted file mode 100644
index 7b791fa415..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayunion-noarray.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayUnion must be the value of a field. Firestore transforms don't support
-# array indexing.
-
-description: "create: ArrayUnion cannot be in an array value"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayunion-with-st.textproto b/tests/unit/v1beta1/testdata/create-arrayunion-with-st.textproto
deleted file mode 100644
index a1bf4a90d1..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayunion-with-st.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. It may not appear in
-# an ArrayUnion.
-
-description: "create: The ServerTimestamp sentinel cannot be in an ArrayUnion"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [\"ArrayUnion\", 1, \"ServerTimestamp\", 3]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-arrayunion.textproto b/tests/unit/v1beta1/testdata/create-arrayunion.textproto
deleted file mode 100644
index 98cb6ad8ac..0000000000
--- a/tests/unit/v1beta1/testdata/create-arrayunion.textproto
+++ /dev/null
@@ -1,47 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with ArrayUnion is removed from the data in the update operation. Instead
-# it appears in a separate Transform operation.
-
-description: "create: ArrayUnion with data"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-basic.textproto b/tests/unit/v1beta1/testdata/create-basic.textproto
deleted file mode 100644
index 433ffda727..0000000000
--- a/tests/unit/v1beta1/testdata/create-basic.textproto
+++ /dev/null
@@ -1,27 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A simple call, resulting in a single update operation.
-
-description: "create: basic"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-complex.textproto b/tests/unit/v1beta1/testdata/create-complex.textproto
deleted file mode 100644
index 00a994e204..0000000000
--- a/tests/unit/v1beta1/testdata/create-complex.textproto
+++ /dev/null
@@ -1,61 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A call to a write method with complicated input data.
-
-description: "create: complex"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- array_value: <
- values: <
- integer_value: 1
- >
- values: <
- double_value: 2.5
- >
- >
- >
- >
- fields: <
- key: "b"
- value: <
- map_value: <
- fields: <
- key: "c"
- value: <
- array_value: <
- values: <
- string_value: "three"
- >
- values: <
- map_value: <
- fields: <
- key: "d"
- value: <
- boolean_value: true
- >
- >
- >
- >
- >
- >
- >
- >
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-del-noarray-nested.textproto b/tests/unit/v1beta1/testdata/create-del-noarray-nested.textproto
deleted file mode 100644
index 60694e1371..0000000000
--- a/tests/unit/v1beta1/testdata/create-del-noarray-nested.textproto
+++ /dev/null
@@ -1,13 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel must be the value of a field. Deletes are implemented by
-# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not
-# support array indexing.
-
-description: "create: Delete cannot be anywhere inside an array value"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-del-noarray.textproto b/tests/unit/v1beta1/testdata/create-del-noarray.textproto
deleted file mode 100644
index 5731be1c73..0000000000
--- a/tests/unit/v1beta1/testdata/create-del-noarray.textproto
+++ /dev/null
@@ -1,13 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel must be the value of a field. Deletes are implemented by
-# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not
-# support array indexing.
-
-description: "create: Delete cannot be in an array value"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, \"Delete\"]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-empty.textproto b/tests/unit/v1beta1/testdata/create-empty.textproto
deleted file mode 100644
index 2b6fec7efa..0000000000
--- a/tests/unit/v1beta1/testdata/create-empty.textproto
+++ /dev/null
@@ -1,20 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-
-description: "create: creating or setting an empty map"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- >
- current_document: <
- exists: false
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-nodel.textproto b/tests/unit/v1beta1/testdata/create-nodel.textproto
deleted file mode 100644
index c878814b11..0000000000
--- a/tests/unit/v1beta1/testdata/create-nodel.textproto
+++ /dev/null
@@ -1,11 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel cannot be used in Create, or in Set without a Merge option.
-
-description: "create: Delete cannot appear in data"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"Delete\"}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-nosplit.textproto b/tests/unit/v1beta1/testdata/create-nosplit.textproto
deleted file mode 100644
index e9e1ee2755..0000000000
--- a/tests/unit/v1beta1/testdata/create-nosplit.textproto
+++ /dev/null
@@ -1,40 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Create and Set treat their map keys literally. They do not split on dots.
-
-description: "create: don\342\200\231t split on dots"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{ \"a.b\": { \"c.d\": 1 }, \"e\": 2 }"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a.b"
- value: <
- map_value: <
- fields: <
- key: "c.d"
- value: <
- integer_value: 1
- >
- >
- >
- >
- >
- fields: <
- key: "e"
- value: <
- integer_value: 2
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-special-chars.textproto b/tests/unit/v1beta1/testdata/create-special-chars.textproto
deleted file mode 100644
index 3a7acd3075..0000000000
--- a/tests/unit/v1beta1/testdata/create-special-chars.textproto
+++ /dev/null
@@ -1,41 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Create and Set treat their map keys literally. They do not escape special
-# characters.
-
-description: "create: non-alpha characters in map keys"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{ \"*\": { \".\": 1 }, \"~\": 2 }"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "*"
- value: <
- map_value: <
- fields: <
- key: "."
- value: <
- integer_value: 1
- >
- >
- >
- >
- >
- fields: <
- key: "~"
- value: <
- integer_value: 2
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-st-alone.textproto b/tests/unit/v1beta1/testdata/create-st-alone.textproto
deleted file mode 100644
index 9803a676bb..0000000000
--- a/tests/unit/v1beta1/testdata/create-st-alone.textproto
+++ /dev/null
@@ -1,26 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the only values in the input are ServerTimestamps, then no update operation
-# should be produced.
-
-description: "create: ServerTimestamp alone"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a"
- set_to_server_value: REQUEST_TIME
- >
- >
- current_document: <
- exists: false
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-st-multi.textproto b/tests/unit/v1beta1/testdata/create-st-multi.textproto
deleted file mode 100644
index cb3db48099..0000000000
--- a/tests/unit/v1beta1/testdata/create-st-multi.textproto
+++ /dev/null
@@ -1,41 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ServerTimestamp field. Since all the
-# ServerTimestamp fields are removed, the only field in the update is "a".
-
-description: "create: multiple ServerTimestamp fields"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- field_transforms: <
- field_path: "c.d"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-st-nested.textproto b/tests/unit/v1beta1/testdata/create-st-nested.textproto
deleted file mode 100644
index 6bc03e8e7c..0000000000
--- a/tests/unit/v1beta1/testdata/create-st-nested.textproto
+++ /dev/null
@@ -1,38 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A ServerTimestamp value can occur at any depth. In this case, the transform
-# applies to the field path "b.c". Since "c" is removed from the update, "b"
-# becomes empty, so it is also removed from the update.
-
-description: "create: nested ServerTimestamp field"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-st-noarray-nested.textproto b/tests/unit/v1beta1/testdata/create-st-noarray-nested.textproto
deleted file mode 100644
index 0cec0aebd4..0000000000
--- a/tests/unit/v1beta1/testdata/create-st-noarray-nested.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ServerTimestamp sentinel. Firestore transforms don't support array indexing.
-
-description: "create: ServerTimestamp cannot be anywhere inside an array value"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-st-noarray.textproto b/tests/unit/v1beta1/testdata/create-st-noarray.textproto
deleted file mode 100644
index 56d91c2cfb..0000000000
--- a/tests/unit/v1beta1/testdata/create-st-noarray.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. Firestore transforms
-# don't support array indexing.
-
-description: "create: ServerTimestamp cannot be in an array value"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/create-st-with-empty-map.textproto b/tests/unit/v1beta1/testdata/create-st-with-empty-map.textproto
deleted file mode 100644
index 37e7e074ab..0000000000
--- a/tests/unit/v1beta1/testdata/create-st-with-empty-map.textproto
+++ /dev/null
@@ -1,45 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp
-# should be stripped out but the empty map should remain.
-
-description: "create: ServerTimestamp beside an empty map"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": {\"b\": {}, \"c\": \"ServerTimestamp\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- map_value: <
- fields: <
- key: "b"
- value: <
- map_value: <
- >
- >
- >
- >
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a.c"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/create-st.textproto b/tests/unit/v1beta1/testdata/create-st.textproto
deleted file mode 100644
index ddfc6a177e..0000000000
--- a/tests/unit/v1beta1/testdata/create-st.textproto
+++ /dev/null
@@ -1,39 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with the special ServerTimestamp sentinel is removed from the data in the
-# update operation. Instead it appears in a separate Transform operation. Note
-# that in these tests, the string "ServerTimestamp" should be replaced with the
-# special ServerTimestamp value.
-
-description: "create: ServerTimestamp with data"
-create: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- current_document: <
- exists: false
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/delete-exists-precond.textproto b/tests/unit/v1beta1/testdata/delete-exists-precond.textproto
deleted file mode 100644
index c9cf2ddea4..0000000000
--- a/tests/unit/v1beta1/testdata/delete-exists-precond.textproto
+++ /dev/null
@@ -1,21 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Delete supports an exists precondition.
-
-description: "delete: delete with exists precondition"
-delete: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- precondition: <
- exists: true
- >
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- delete: "projects/projectID/databases/(default)/documents/C/d"
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/delete-no-precond.textproto b/tests/unit/v1beta1/testdata/delete-no-precond.textproto
deleted file mode 100644
index a396cdb8c4..0000000000
--- a/tests/unit/v1beta1/testdata/delete-no-precond.textproto
+++ /dev/null
@@ -1,15 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An ordinary Delete call.
-
-description: "delete: delete without precondition"
-delete: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- delete: "projects/projectID/databases/(default)/documents/C/d"
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/delete-time-precond.textproto b/tests/unit/v1beta1/testdata/delete-time-precond.textproto
deleted file mode 100644
index 5798f5f3b2..0000000000
--- a/tests/unit/v1beta1/testdata/delete-time-precond.textproto
+++ /dev/null
@@ -1,25 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Delete supports a last-update-time precondition.
-
-description: "delete: delete with last-update-time precondition"
-delete: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- precondition: <
- update_time: <
- seconds: 42
- >
- >
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- delete: "projects/projectID/databases/(default)/documents/C/d"
- current_document: <
- update_time: <
- seconds: 42
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/get-basic.textproto b/tests/unit/v1beta1/testdata/get-basic.textproto
deleted file mode 100644
index 2a44816825..0000000000
--- a/tests/unit/v1beta1/testdata/get-basic.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A call to DocumentRef.Get.
-
-description: "get: get a document"
-get: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- request: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-add-mod-del-add.textproto b/tests/unit/v1beta1/testdata/listen-add-mod-del-add.textproto
deleted file mode 100644
index 1aa8dcbc36..0000000000
--- a/tests/unit/v1beta1/testdata/listen-add-mod-del-add.textproto
+++ /dev/null
@@ -1,246 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Various changes to a single document.
-
-description: "listen: add a doc, modify it, delete it, then add it again"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 2
- >
- >
- >
- responses: <
- document_delete: <
- document: "projects/projectID/databases/(default)/documents/C/d1"
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 3
- >
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 4
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- >
- read_time: <
- seconds: 1
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- changes: <
- kind: MODIFIED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- >
- read_time: <
- seconds: 2
- >
- >
- snapshots: <
- changes: <
- kind: REMOVED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- new_index: -1
- >
- read_time: <
- seconds: 3
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- old_index: -1
- >
- read_time: <
- seconds: 4
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-add-one.textproto b/tests/unit/v1beta1/testdata/listen-add-one.textproto
deleted file mode 100644
index 2ad1d8e976..0000000000
--- a/tests/unit/v1beta1/testdata/listen-add-one.textproto
+++ /dev/null
@@ -1,79 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Snapshot with a single document.
-
-description: "listen: add a doc"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 2
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- >
- read_time: <
- seconds: 2
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-add-three.textproto b/tests/unit/v1beta1/testdata/listen-add-three.textproto
deleted file mode 100644
index ac846f7626..0000000000
--- a/tests/unit/v1beta1/testdata/listen-add-three.textproto
+++ /dev/null
@@ -1,190 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A snapshot with three documents. The documents are sorted first by the "a"
-# field, then by their path. The changes are ordered the same way.
-
-description: "listen: add three documents"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 2
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- new_index: 1
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- new_index: 2
- >
- read_time: <
- seconds: 2
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-doc-remove.textproto b/tests/unit/v1beta1/testdata/listen-doc-remove.textproto
deleted file mode 100644
index 975200f973..0000000000
--- a/tests/unit/v1beta1/testdata/listen-doc-remove.textproto
+++ /dev/null
@@ -1,115 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The DocumentRemove response behaves exactly like DocumentDelete.
-
-description: "listen: DocumentRemove behaves like DocumentDelete"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- responses: <
- document_remove: <
- document: "projects/projectID/databases/(default)/documents/C/d1"
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 2
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- >
- read_time: <
- seconds: 1
- >
- >
- snapshots: <
- changes: <
- kind: REMOVED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- new_index: -1
- >
- read_time: <
- seconds: 2
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-empty.textproto b/tests/unit/v1beta1/testdata/listen-empty.textproto
deleted file mode 100644
index 4d04b79096..0000000000
--- a/tests/unit/v1beta1/testdata/listen-empty.textproto
+++ /dev/null
@@ -1,25 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There are no changes, so the snapshot should be empty.
-
-description: "listen: no changes; empty snapshot"
-listen: <
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- snapshots: <
- read_time: <
- seconds: 1
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-filter-nop.textproto b/tests/unit/v1beta1/testdata/listen-filter-nop.textproto
deleted file mode 100644
index 48fd72d3ae..0000000000
--- a/tests/unit/v1beta1/testdata/listen-filter-nop.textproto
+++ /dev/null
@@ -1,247 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A Filter response whose count matches the size of the current state (docs in
-# last snapshot + docs added - docs deleted) is a no-op.
-
-description: "listen: Filter response with same size is a no-op"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_delete: <
- document: "projects/projectID/databases/(default)/documents/C/d1"
- >
- >
- responses: <
- filter: <
- count: 2
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 2
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- new_index: 1
- >
- read_time: <
- seconds: 1
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: REMOVED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: 1
- new_index: -1
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- new_index: 1
- >
- read_time: <
- seconds: 2
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-multi-docs.textproto b/tests/unit/v1beta1/testdata/listen-multi-docs.textproto
deleted file mode 100644
index 8778acc3d1..0000000000
--- a/tests/unit/v1beta1/testdata/listen-multi-docs.textproto
+++ /dev/null
@@ -1,524 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Changes should be ordered with deletes first, then additions, then mods, each in
-# query order. Old indices refer to the immediately previous state, not the
-# previous snapshot
-
-description: "listen: multiple documents, added, deleted and updated"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d4"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 2
- >
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d5"
- fields: <
- key: "a"
- value: <
- integer_value: 4
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_delete: <
- document: "projects/projectID/databases/(default)/documents/C/d3"
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: -1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d6"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_delete: <
- document: "projects/projectID/databases/(default)/documents/C/d2"
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d4"
- fields: <
- key: "a"
- value: <
- integer_value: -2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 4
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d4"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- new_index: 1
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d4"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- new_index: 2
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- new_index: 3
- >
- read_time: <
- seconds: 2
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d4"
- fields: <
- key: "a"
- value: <
- integer_value: -2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: -1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d6"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d5"
- fields: <
- key: "a"
- value: <
- integer_value: 4
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: REMOVED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- new_index: -1
- >
- changes: <
- kind: REMOVED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- new_index: -1
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d6"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- new_index: 2
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d5"
- fields: <
- key: "a"
- value: <
- integer_value: 4
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- new_index: 3
- >
- changes: <
- kind: MODIFIED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d4"
- fields: <
- key: "a"
- value: <
- integer_value: -2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- >
- changes: <
- kind: MODIFIED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: -1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- old_index: 1
- new_index: 1
- >
- read_time: <
- seconds: 4
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-nocurrent.textproto b/tests/unit/v1beta1/testdata/listen-nocurrent.textproto
deleted file mode 100644
index 24239b6456..0000000000
--- a/tests/unit/v1beta1/testdata/listen-nocurrent.textproto
+++ /dev/null
@@ -1,141 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the watch state is not marked CURRENT, no snapshot is issued.
-
-description: "listen: no snapshot if we don't see CURRENT"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 2
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- old_index: -1
- new_index: 1
- >
- read_time: <
- seconds: 2
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-nomod.textproto b/tests/unit/v1beta1/testdata/listen-nomod.textproto
deleted file mode 100644
index 2a99edc350..0000000000
--- a/tests/unit/v1beta1/testdata/listen-nomod.textproto
+++ /dev/null
@@ -1,143 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Document updates are recognized by a change in the update time, not the data.
-# This shouldn't actually happen. It is just a test of the update logic.
-
-description: "listen: add a doc, then change it but without changing its update time"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 2
- >
- >
- >
- responses: <
- document_delete: <
- document: "projects/projectID/databases/(default)/documents/C/d1"
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 3
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- >
- read_time: <
- seconds: 1
- >
- >
- snapshots: <
- changes: <
- kind: REMOVED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- new_index: -1
- >
- read_time: <
- seconds: 3
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-removed-target-ids.textproto b/tests/unit/v1beta1/testdata/listen-removed-target-ids.textproto
deleted file mode 100644
index 1e8ead2d80..0000000000
--- a/tests/unit/v1beta1/testdata/listen-removed-target-ids.textproto
+++ /dev/null
@@ -1,131 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A DocumentChange with the watch target ID in the removed_target_ids field is the
-# same as deleting a document.
-
-description: "listen: DocumentChange with removed_target_id is like a delete."
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- removed_target_ids: 1
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 2
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- >
- read_time: <
- seconds: 1
- >
- >
- snapshots: <
- changes: <
- kind: REMOVED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- new_index: -1
- >
- read_time: <
- seconds: 2
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-reset.textproto b/tests/unit/v1beta1/testdata/listen-reset.textproto
deleted file mode 100644
index 89a75df278..0000000000
--- a/tests/unit/v1beta1/testdata/listen-reset.textproto
+++ /dev/null
@@ -1,382 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A RESET message turns off the CURRENT state, and marks all documents as deleted.
-
-# If a document appeared on the stream but was never part of a snapshot ("d3" in
-# this test), a reset will make it disappear completely.
-
-# For a snapshot to happen at a NO_CHANGE reponse, we need to have both seen a
-# CURRENT response, and have a change from the previous snapshot. Here, after the
-# reset, we see the same version of d2 again. That doesn't result in a snapshot.
-
-description: "listen: RESET turns off CURRENT"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: RESET
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 2
- >
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 3
- >
- >
- >
- responses: <
- target_change: <
- target_change_type: RESET
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 4
- >
- >
- >
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 5
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- old_index: -1
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- new_index: 1
- >
- read_time: <
- seconds: 1
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- changes: <
- kind: REMOVED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 2
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: 1
- new_index: -1
- >
- changes: <
- kind: MODIFIED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- >
- read_time: <
- seconds: 3
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d2"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 3
- >
- >
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d3"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 2
- >
- >
- old_index: -1
- new_index: 1
- >
- read_time: <
- seconds: 5
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-target-add-nop.textproto b/tests/unit/v1beta1/testdata/listen-target-add-nop.textproto
deleted file mode 100644
index 3fa7cce56e..0000000000
--- a/tests/unit/v1beta1/testdata/listen-target-add-nop.textproto
+++ /dev/null
@@ -1,88 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A TargetChange_ADD response must have the same watch target ID.
-
-description: "listen: TargetChange_ADD is a no-op if it has the same target ID"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- target_change_type: ADD
- target_ids: 1
- read_time: <
- seconds: 2
- >
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- snapshots: <
- docs: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- changes: <
- kind: ADDED
- doc: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- old_index: -1
- >
- read_time: <
- seconds: 1
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/listen-target-add-wrong-id.textproto b/tests/unit/v1beta1/testdata/listen-target-add-wrong-id.textproto
deleted file mode 100644
index 87544637b5..0000000000
--- a/tests/unit/v1beta1/testdata/listen-target-add-wrong-id.textproto
+++ /dev/null
@@ -1,50 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A TargetChange_ADD response must have the same watch target ID.
-
-description: "listen: TargetChange_ADD is an error if it has a different target ID"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- target_change_type: ADD
- target_ids: 2
- read_time: <
- seconds: 2
- >
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/listen-target-remove.textproto b/tests/unit/v1beta1/testdata/listen-target-remove.textproto
deleted file mode 100644
index f34b0890c3..0000000000
--- a/tests/unit/v1beta1/testdata/listen-target-remove.textproto
+++ /dev/null
@@ -1,46 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A TargetChange_REMOVE response should never be sent.
-
-description: "listen: TargetChange_REMOVE should not appear"
-listen: <
- responses: <
- document_change: <
- document: <
- name: "projects/projectID/databases/(default)/documents/C/d1"
- fields: <
- key: "a"
- value: <
- integer_value: 3
- >
- >
- create_time: <
- seconds: 1
- >
- update_time: <
- seconds: 1
- >
- >
- target_ids: 1
- >
- >
- responses: <
- target_change: <
- target_change_type: CURRENT
- >
- >
- responses: <
- target_change: <
- target_change_type: REMOVE
- >
- >
- responses: <
- target_change: <
- read_time: <
- seconds: 1
- >
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-arrayremove-cursor.textproto b/tests/unit/v1beta1/testdata/query-arrayremove-cursor.textproto
deleted file mode 100644
index 3c926da963..0000000000
--- a/tests/unit/v1beta1/testdata/query-arrayremove-cursor.textproto
+++ /dev/null
@@ -1,23 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayRemove is not permitted in queries.
-
-description: "query: ArrayRemove in cursor method"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- end_before: <
- json_values: "[\"ArrayRemove\", 1, 2, 3]"
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-arrayremove-where.textproto b/tests/unit/v1beta1/testdata/query-arrayremove-where.textproto
deleted file mode 100644
index 000b76350e..0000000000
--- a/tests/unit/v1beta1/testdata/query-arrayremove-where.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayRemove is not permitted in queries.
-
-description: "query: ArrayRemove in Where"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "=="
- json_value: "[\"ArrayRemove\", 1, 2, 3]"
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-arrayunion-cursor.textproto b/tests/unit/v1beta1/testdata/query-arrayunion-cursor.textproto
deleted file mode 100644
index e8a61104d1..0000000000
--- a/tests/unit/v1beta1/testdata/query-arrayunion-cursor.textproto
+++ /dev/null
@@ -1,23 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayUnion is not permitted in queries.
-
-description: "query: ArrayUnion in cursor method"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- end_before: <
- json_values: "[\"ArrayUnion\", 1, 2, 3]"
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-arrayunion-where.textproto b/tests/unit/v1beta1/testdata/query-arrayunion-where.textproto
deleted file mode 100644
index 94923134e2..0000000000
--- a/tests/unit/v1beta1/testdata/query-arrayunion-where.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayUnion is not permitted in queries.
-
-description: "query: ArrayUnion in Where"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "=="
- json_value: "[\"ArrayUnion\", 1, 2, 3]"
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-bad-NaN.textproto b/tests/unit/v1beta1/testdata/query-bad-NaN.textproto
deleted file mode 100644
index 6806dd04ab..0000000000
--- a/tests/unit/v1beta1/testdata/query-bad-NaN.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# You can only compare NaN for equality.
-
-description: "query: where clause with non-== comparison with NaN"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "<"
- json_value: "\"NaN\""
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-bad-null.textproto b/tests/unit/v1beta1/testdata/query-bad-null.textproto
deleted file mode 100644
index 7fdfb3f2b5..0000000000
--- a/tests/unit/v1beta1/testdata/query-bad-null.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# You can only compare Null for equality.
-
-description: "query: where clause with non-== comparison with Null"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: ">"
- json_value: "null"
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap-order.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap-order.textproto
deleted file mode 100644
index bab8601e8d..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-docsnap-order.textproto
+++ /dev/null
@@ -1,68 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# When a document snapshot is used, the client appends a __name__ order-by clause
-# with the direction of the last order-by clause.
-
-description: "query: cursor methods with a document snapshot, existing orderBy"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- order_by: <
- path: <
- field: "b"
- >
- direction: "desc"
- >
- >
- clauses: <
- start_after: <
- doc_snapshot: <
- path: "projects/projectID/databases/(default)/documents/C/D"
- json_data: "{\"a\": 7, \"b\": 8}"
- >
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: ASCENDING
- >
- order_by: <
- field: <
- field_path: "b"
- >
- direction: DESCENDING
- >
- order_by: <
- field: <
- field_path: "__name__"
- >
- direction: DESCENDING
- >
- start_at: <
- values: <
- integer_value: 7
- >
- values: <
- integer_value: 8
- >
- values: <
- reference_value: "projects/projectID/databases/(default)/documents/C/D"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap-orderby-name.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap-orderby-name.textproto
deleted file mode 100644
index d0ce3df45a..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-docsnap-orderby-name.textproto
+++ /dev/null
@@ -1,76 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If there is an existing orderBy clause on __name__, no changes are made to the
-# list of orderBy clauses.
-
-description: "query: cursor method, doc snapshot, existing orderBy __name__"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "desc"
- >
- >
- clauses: <
- order_by: <
- path: <
- field: "__name__"
- >
- direction: "asc"
- >
- >
- clauses: <
- start_at: <
- doc_snapshot: <
- path: "projects/projectID/databases/(default)/documents/C/D"
- json_data: "{\"a\": 7, \"b\": 8}"
- >
- >
- >
- clauses: <
- end_at: <
- doc_snapshot: <
- path: "projects/projectID/databases/(default)/documents/C/D"
- json_data: "{\"a\": 7, \"b\": 8}"
- >
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: DESCENDING
- >
- order_by: <
- field: <
- field_path: "__name__"
- >
- direction: ASCENDING
- >
- start_at: <
- values: <
- integer_value: 7
- >
- values: <
- reference_value: "projects/projectID/databases/(default)/documents/C/D"
- >
- before: true
- >
- end_at: <
- values: <
- integer_value: 7
- >
- values: <
- reference_value: "projects/projectID/databases/(default)/documents/C/D"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-eq.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-eq.textproto
deleted file mode 100644
index 8b1e217df5..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-eq.textproto
+++ /dev/null
@@ -1,53 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A Where clause using equality doesn't change the implicit orderBy clauses.
-
-description: "query: cursor methods with a document snapshot and an equality where clause"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "=="
- json_value: "3"
- >
- >
- clauses: <
- end_at: <
- doc_snapshot: <
- path: "projects/projectID/databases/(default)/documents/C/D"
- json_data: "{\"a\": 7, \"b\": 8}"
- >
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- where: <
- field_filter: <
- field: <
- field_path: "a"
- >
- op: EQUAL
- value: <
- integer_value: 3
- >
- >
- >
- order_by: <
- field: <
- field_path: "__name__"
- >
- direction: ASCENDING
- >
- end_at: <
- values: <
- reference_value: "projects/projectID/databases/(default)/documents/C/D"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq-orderby.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq-orderby.textproto
deleted file mode 100644
index a69edfc50d..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq-orderby.textproto
+++ /dev/null
@@ -1,72 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If there is an OrderBy clause, the inequality Where clause does not result in a
-# new OrderBy clause. We still add a __name__ OrderBy clause
-
-description: "query: cursor method, doc snapshot, inequality where clause, and existing orderBy clause"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "desc"
- >
- >
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "<"
- json_value: "4"
- >
- >
- clauses: <
- start_at: <
- doc_snapshot: <
- path: "projects/projectID/databases/(default)/documents/C/D"
- json_data: "{\"a\": 7, \"b\": 8}"
- >
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- where: <
- field_filter: <
- field: <
- field_path: "a"
- >
- op: LESS_THAN
- value: <
- integer_value: 4
- >
- >
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: DESCENDING
- >
- order_by: <
- field: <
- field_path: "__name__"
- >
- direction: DESCENDING
- >
- start_at: <
- values: <
- integer_value: 7
- >
- values: <
- reference_value: "projects/projectID/databases/(default)/documents/C/D"
- >
- before: true
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq.textproto
deleted file mode 100644
index 871dd0ba33..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq.textproto
+++ /dev/null
@@ -1,64 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A Where clause with an inequality results in an OrderBy clause on that clause's
-# path, if there are no other OrderBy clauses.
-
-description: "query: cursor method with a document snapshot and an inequality where clause"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "<="
- json_value: "3"
- >
- >
- clauses: <
- end_before: <
- doc_snapshot: <
- path: "projects/projectID/databases/(default)/documents/C/D"
- json_data: "{\"a\": 7, \"b\": 8}"
- >
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- where: <
- field_filter: <
- field: <
- field_path: "a"
- >
- op: LESS_THAN_OR_EQUAL
- value: <
- integer_value: 3
- >
- >
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: ASCENDING
- >
- order_by: <
- field: <
- field_path: "__name__"
- >
- direction: ASCENDING
- >
- end_at: <
- values: <
- integer_value: 7
- >
- values: <
- reference_value: "projects/projectID/databases/(default)/documents/C/D"
- >
- before: true
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap.textproto
deleted file mode 100644
index 184bffc2d3..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-docsnap.textproto
+++ /dev/null
@@ -1,34 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# When a document snapshot is used, the client appends a __name__ order-by clause.
-
-description: "query: cursor methods with a document snapshot"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- start_at: <
- doc_snapshot: <
- path: "projects/projectID/databases/(default)/documents/C/D"
- json_data: "{\"a\": 7, \"b\": 8}"
- >
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "__name__"
- >
- direction: ASCENDING
- >
- start_at: <
- values: <
- reference_value: "projects/projectID/databases/(default)/documents/C/D"
- >
- before: true
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty-map.textproto b/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty-map.textproto
deleted file mode 100644
index c197d23afe..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty-map.textproto
+++ /dev/null
@@ -1,41 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Cursor methods are allowed to use empty maps with EndBefore. It should result in
-# an empty map in the query.
-
-description: "query: EndBefore with explicit empty map"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- end_before: <
- json_values: "{}"
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: ASCENDING
- >
- end_at: <
- values: <
- map_value: <
- >
- >
- before: true
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty.textproto b/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty.textproto
deleted file mode 100644
index a41775abf0..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty.textproto
+++ /dev/null
@@ -1,23 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Cursor methods are not allowed to use empty values with EndBefore. It should
-# result in an error.
-
-description: "query: EndBefore with empty values"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- end_before: <
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-no-order.textproto b/tests/unit/v1beta1/testdata/query-cursor-no-order.textproto
deleted file mode 100644
index fb999ddabb..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-no-order.textproto
+++ /dev/null
@@ -1,16 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If a cursor method with a list of values is provided, there must be at least as
-# many explicit orderBy clauses as values.
-
-description: "query: cursor method without orderBy"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- start_at: <
- json_values: "2"
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-startat-empty-map.textproto b/tests/unit/v1beta1/testdata/query-cursor-startat-empty-map.textproto
deleted file mode 100644
index 557aca2c91..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-startat-empty-map.textproto
+++ /dev/null
@@ -1,41 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Cursor methods are allowed to use empty maps with StartAt. It should result in
-# an empty map in the query.
-
-description: "query: StartAt with explicit empty map"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- start_at: <
- json_values: "{}"
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: ASCENDING
- >
- start_at: <
- values: <
- map_value: <
- >
- >
- before: true
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-startat-empty.textproto b/tests/unit/v1beta1/testdata/query-cursor-startat-empty.textproto
deleted file mode 100644
index e0c54d98a6..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-startat-empty.textproto
+++ /dev/null
@@ -1,23 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Cursor methods are not allowed to use empty values with StartAt. It should
-# result in an error.
-
-description: "query: StartAt with empty values"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- start_at: <
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-vals-1a.textproto b/tests/unit/v1beta1/testdata/query-cursor-vals-1a.textproto
deleted file mode 100644
index bb08ab7d4d..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-vals-1a.textproto
+++ /dev/null
@@ -1,50 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Cursor methods take the same number of values as there are OrderBy clauses.
-
-description: "query: StartAt/EndBefore with values"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- start_at: <
- json_values: "7"
- >
- >
- clauses: <
- end_before: <
- json_values: "9"
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: ASCENDING
- >
- start_at: <
- values: <
- integer_value: 7
- >
- before: true
- >
- end_at: <
- values: <
- integer_value: 9
- >
- before: true
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-vals-1b.textproto b/tests/unit/v1beta1/testdata/query-cursor-vals-1b.textproto
deleted file mode 100644
index 41e69e9e6f..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-vals-1b.textproto
+++ /dev/null
@@ -1,48 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Cursor methods take the same number of values as there are OrderBy clauses.
-
-description: "query: StartAfter/EndAt with values"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- start_after: <
- json_values: "7"
- >
- >
- clauses: <
- end_at: <
- json_values: "9"
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: ASCENDING
- >
- start_at: <
- values: <
- integer_value: 7
- >
- >
- end_at: <
- values: <
- integer_value: 9
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-vals-2.textproto b/tests/unit/v1beta1/testdata/query-cursor-vals-2.textproto
deleted file mode 100644
index 8e37ad0035..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-vals-2.textproto
+++ /dev/null
@@ -1,71 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Cursor methods take the same number of values as there are OrderBy clauses.
-
-description: "query: Start/End with two values"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- order_by: <
- path: <
- field: "b"
- >
- direction: "desc"
- >
- >
- clauses: <
- start_at: <
- json_values: "7"
- json_values: "8"
- >
- >
- clauses: <
- end_at: <
- json_values: "9"
- json_values: "10"
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: ASCENDING
- >
- order_by: <
- field: <
- field_path: "b"
- >
- direction: DESCENDING
- >
- start_at: <
- values: <
- integer_value: 7
- >
- values: <
- integer_value: 8
- >
- before: true
- >
- end_at: <
- values: <
- integer_value: 9
- >
- values: <
- integer_value: 10
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-vals-docid.textproto b/tests/unit/v1beta1/testdata/query-cursor-vals-docid.textproto
deleted file mode 100644
index 91af3486c9..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-vals-docid.textproto
+++ /dev/null
@@ -1,50 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Cursor values corresponding to a __name__ field take the document path relative
-# to the query's collection.
-
-description: "query: cursor methods with __name__"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "__name__"
- >
- direction: "asc"
- >
- >
- clauses: <
- start_after: <
- json_values: "\"D1\""
- >
- >
- clauses: <
- end_before: <
- json_values: "\"D2\""
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "__name__"
- >
- direction: ASCENDING
- >
- start_at: <
- values: <
- reference_value: "projects/projectID/databases/(default)/documents/C/D1"
- >
- >
- end_at: <
- values: <
- reference_value: "projects/projectID/databases/(default)/documents/C/D2"
- >
- before: true
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-cursor-vals-last-wins.textproto b/tests/unit/v1beta1/testdata/query-cursor-vals-last-wins.textproto
deleted file mode 100644
index 9e8fbb19f3..0000000000
--- a/tests/unit/v1beta1/testdata/query-cursor-vals-last-wins.textproto
+++ /dev/null
@@ -1,60 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# When multiple Start* or End* calls occur, the values of the last one are used.
-
-description: "query: cursor methods, last one wins"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- start_after: <
- json_values: "1"
- >
- >
- clauses: <
- start_at: <
- json_values: "2"
- >
- >
- clauses: <
- end_at: <
- json_values: "3"
- >
- >
- clauses: <
- end_before: <
- json_values: "4"
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: ASCENDING
- >
- start_at: <
- values: <
- integer_value: 2
- >
- before: true
- >
- end_at: <
- values: <
- integer_value: 4
- >
- before: true
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-del-cursor.textproto b/tests/unit/v1beta1/testdata/query-del-cursor.textproto
deleted file mode 100644
index c9d4adb7c5..0000000000
--- a/tests/unit/v1beta1/testdata/query-del-cursor.textproto
+++ /dev/null
@@ -1,23 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Sentinel values are not permitted in queries.
-
-description: "query: Delete in cursor method"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- end_before: <
- json_values: "\"Delete\""
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-del-where.textproto b/tests/unit/v1beta1/testdata/query-del-where.textproto
deleted file mode 100644
index 8e92529492..0000000000
--- a/tests/unit/v1beta1/testdata/query-del-where.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Sentinel values are not permitted in queries.
-
-description: "query: Delete in Where"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "=="
- json_value: "\"Delete\""
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-invalid-operator.textproto b/tests/unit/v1beta1/testdata/query-invalid-operator.textproto
deleted file mode 100644
index e580c64a75..0000000000
--- a/tests/unit/v1beta1/testdata/query-invalid-operator.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The != operator is not supported.
-
-description: "query: invalid operator in Where clause"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "!="
- json_value: "4"
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-invalid-path-order.textproto b/tests/unit/v1beta1/testdata/query-invalid-path-order.textproto
deleted file mode 100644
index e0a7205762..0000000000
--- a/tests/unit/v1beta1/testdata/query-invalid-path-order.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The path has an empty component.
-
-description: "query: invalid path in OrderBy clause"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "*"
- field: ""
- >
- direction: "asc"
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-invalid-path-select.textproto b/tests/unit/v1beta1/testdata/query-invalid-path-select.textproto
deleted file mode 100644
index 944f984f7f..0000000000
--- a/tests/unit/v1beta1/testdata/query-invalid-path-select.textproto
+++ /dev/null
@@ -1,18 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The path has an empty component.
-
-description: "query: invalid path in Where clause"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- select: <
- fields: <
- field: "*"
- field: ""
- >
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-invalid-path-where.textproto b/tests/unit/v1beta1/testdata/query-invalid-path-where.textproto
deleted file mode 100644
index 527923b097..0000000000
--- a/tests/unit/v1beta1/testdata/query-invalid-path-where.textproto
+++ /dev/null
@@ -1,20 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The path has an empty component.
-
-description: "query: invalid path in Where clause"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "*"
- field: ""
- >
- op: "=="
- json_value: "4"
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-offset-limit-last-wins.textproto b/tests/unit/v1beta1/testdata/query-offset-limit-last-wins.textproto
deleted file mode 100644
index dc301f439e..0000000000
--- a/tests/unit/v1beta1/testdata/query-offset-limit-last-wins.textproto
+++ /dev/null
@@ -1,30 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# With multiple Offset or Limit clauses, the last one wins.
-
-description: "query: multiple Offset and Limit clauses"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- offset: 2
- >
- clauses: <
- limit: 3
- >
- clauses: <
- limit: 4
- >
- clauses: <
- offset: 5
- >
- query: <
- from: <
- collection_id: "C"
- >
- offset: 5
- limit: <
- value: 4
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-offset-limit.textproto b/tests/unit/v1beta1/testdata/query-offset-limit.textproto
deleted file mode 100644
index 136d9d46a6..0000000000
--- a/tests/unit/v1beta1/testdata/query-offset-limit.textproto
+++ /dev/null
@@ -1,24 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Offset and Limit clauses.
-
-description: "query: Offset and Limit clauses"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- offset: 2
- >
- clauses: <
- limit: 3
- >
- query: <
- from: <
- collection_id: "C"
- >
- offset: 2
- limit: <
- value: 3
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-order.textproto b/tests/unit/v1beta1/testdata/query-order.textproto
deleted file mode 100644
index 7ed4c4ead8..0000000000
--- a/tests/unit/v1beta1/testdata/query-order.textproto
+++ /dev/null
@@ -1,42 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Multiple OrderBy clauses combine.
-
-description: "query: basic OrderBy clauses"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "b"
- >
- direction: "asc"
- >
- >
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "desc"
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- order_by: <
- field: <
- field_path: "b"
- >
- direction: ASCENDING
- >
- order_by: <
- field: <
- field_path: "a"
- >
- direction: DESCENDING
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-select-empty.textproto b/tests/unit/v1beta1/testdata/query-select-empty.textproto
deleted file mode 100644
index def8b55ac5..0000000000
--- a/tests/unit/v1beta1/testdata/query-select-empty.textproto
+++ /dev/null
@@ -1,23 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An empty Select clause selects just the document ID.
-
-description: "query: empty Select clause"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- select: <
- >
- >
- query: <
- select: <
- fields: <
- field_path: "__name__"
- >
- >
- from: <
- collection_id: "C"
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-select-last-wins.textproto b/tests/unit/v1beta1/testdata/query-select-last-wins.textproto
deleted file mode 100644
index bd78d09eb9..0000000000
--- a/tests/unit/v1beta1/testdata/query-select-last-wins.textproto
+++ /dev/null
@@ -1,36 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The last Select clause is the only one used.
-
-description: "query: two Select clauses"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- select: <
- fields: <
- field: "a"
- >
- fields: <
- field: "b"
- >
- >
- >
- clauses: <
- select: <
- fields: <
- field: "c"
- >
- >
- >
- query: <
- select: <
- fields: <
- field_path: "c"
- >
- >
- from: <
- collection_id: "C"
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-select.textproto b/tests/unit/v1beta1/testdata/query-select.textproto
deleted file mode 100644
index 15e1124973..0000000000
--- a/tests/unit/v1beta1/testdata/query-select.textproto
+++ /dev/null
@@ -1,32 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An ordinary Select clause.
-
-description: "query: Select clause with some fields"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- select: <
- fields: <
- field: "a"
- >
- fields: <
- field: "b"
- >
- >
- >
- query: <
- select: <
- fields: <
- field_path: "a"
- >
- fields: <
- field_path: "b"
- >
- >
- from: <
- collection_id: "C"
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-st-cursor.textproto b/tests/unit/v1beta1/testdata/query-st-cursor.textproto
deleted file mode 100644
index 66885d0dd5..0000000000
--- a/tests/unit/v1beta1/testdata/query-st-cursor.textproto
+++ /dev/null
@@ -1,23 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Sentinel values are not permitted in queries.
-
-description: "query: ServerTimestamp in cursor method"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- order_by: <
- path: <
- field: "a"
- >
- direction: "asc"
- >
- >
- clauses: <
- end_before: <
- json_values: "\"ServerTimestamp\""
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-st-where.textproto b/tests/unit/v1beta1/testdata/query-st-where.textproto
deleted file mode 100644
index 05da28d542..0000000000
--- a/tests/unit/v1beta1/testdata/query-st-where.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Sentinel values are not permitted in queries.
-
-description: "query: ServerTimestamp in Where"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "=="
- json_value: "\"ServerTimestamp\""
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/query-where-2.textproto b/tests/unit/v1beta1/testdata/query-where-2.textproto
deleted file mode 100644
index 1034463079..0000000000
--- a/tests/unit/v1beta1/testdata/query-where-2.textproto
+++ /dev/null
@@ -1,59 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Multiple Where clauses are combined into a composite filter.
-
-description: "query: two Where clauses"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: ">="
- json_value: "5"
- >
- >
- clauses: <
- where: <
- path: <
- field: "b"
- >
- op: "<"
- json_value: "\"foo\""
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- where: <
- composite_filter: <
- op: AND
- filters: <
- field_filter: <
- field: <
- field_path: "a"
- >
- op: GREATER_THAN_OR_EQUAL
- value: <
- integer_value: 5
- >
- >
- >
- filters: <
- field_filter: <
- field: <
- field_path: "b"
- >
- op: LESS_THAN
- value: <
- string_value: "foo"
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-where-NaN.textproto b/tests/unit/v1beta1/testdata/query-where-NaN.textproto
deleted file mode 100644
index 4a97ca7dde..0000000000
--- a/tests/unit/v1beta1/testdata/query-where-NaN.textproto
+++ /dev/null
@@ -1,31 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A Where clause that tests for equality with NaN results in a unary filter.
-
-description: "query: a Where clause comparing to NaN"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "=="
- json_value: "\"NaN\""
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- where: <
- unary_filter: <
- op: IS_NAN
- field: <
- field_path: "a"
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-where-null.textproto b/tests/unit/v1beta1/testdata/query-where-null.textproto
deleted file mode 100644
index 1869c60c72..0000000000
--- a/tests/unit/v1beta1/testdata/query-where-null.textproto
+++ /dev/null
@@ -1,31 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A Where clause that tests for equality with null results in a unary filter.
-
-description: "query: a Where clause comparing to null"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: "=="
- json_value: "null"
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- where: <
- unary_filter: <
- op: IS_NULL
- field: <
- field_path: "a"
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-where.textproto b/tests/unit/v1beta1/testdata/query-where.textproto
deleted file mode 100644
index 045c2befab..0000000000
--- a/tests/unit/v1beta1/testdata/query-where.textproto
+++ /dev/null
@@ -1,34 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A simple Where clause.
-
-description: "query: Where clause"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- where: <
- path: <
- field: "a"
- >
- op: ">"
- json_value: "5"
- >
- >
- query: <
- from: <
- collection_id: "C"
- >
- where: <
- field_filter: <
- field: <
- field_path: "a"
- >
- op: GREATER_THAN
- value: <
- integer_value: 5
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/query-wrong-collection.textproto b/tests/unit/v1beta1/testdata/query-wrong-collection.textproto
deleted file mode 100644
index ad6f353d5f..0000000000
--- a/tests/unit/v1beta1/testdata/query-wrong-collection.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If a document snapshot is passed to a Start*/End* method, it must be in the same
-# collection as the query.
-
-description: "query: doc snapshot with wrong collection in cursor method"
-query: <
- coll_path: "projects/projectID/databases/(default)/documents/C"
- clauses: <
- end_before: <
- doc_snapshot: <
- path: "projects/projectID/databases/(default)/documents/C2/D"
- json_data: "{\"a\": 7, \"b\": 8}"
- >
- >
- >
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-all-transforms.textproto b/tests/unit/v1beta1/testdata/set-all-transforms.textproto
deleted file mode 100644
index bf18f9a5b1..0000000000
--- a/tests/unit/v1beta1/testdata/set-all-transforms.textproto
+++ /dev/null
@@ -1,61 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can be created with any amount of transforms.
-
-description: "set: all transforms in a single call"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": [\"ArrayUnion\", 1, 2, 3], \"d\": [\"ArrayRemove\", 4, 5, 6]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- field_transforms: <
- field_path: "c"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "d"
- remove_all_from_array: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayremove-multi.textproto b/tests/unit/v1beta1/testdata/set-arrayremove-multi.textproto
deleted file mode 100644
index 9b62fe1919..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayremove-multi.textproto
+++ /dev/null
@@ -1,58 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ArrayRemove field. Since all the ArrayRemove
-# fields are removed, the only field in the update is "a".
-
-description: "set: multiple ArrayRemove fields"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3], \"c\": {\"d\": [\"ArrayRemove\", 4, 5, 6]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "c.d"
- remove_all_from_array: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayremove-nested.textproto b/tests/unit/v1beta1/testdata/set-arrayremove-nested.textproto
deleted file mode 100644
index 617609c5a3..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayremove-nested.textproto
+++ /dev/null
@@ -1,45 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An ArrayRemove value can occur at any depth. In this case, the transform applies
-# to the field path "b.c". Since "c" is removed from the update, "b" becomes
-# empty, so it is also removed from the update.
-
-description: "set: nested ArrayRemove field"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayRemove\", 1, 2, 3]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayremove-noarray-nested.textproto b/tests/unit/v1beta1/testdata/set-arrayremove-noarray-nested.textproto
deleted file mode 100644
index 2efa34a59f..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayremove-noarray-nested.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ArrayRemove. Firestore transforms don't support array indexing.
-
-description: "set: ArrayRemove cannot be anywhere inside an array value"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayremove-noarray.textproto b/tests/unit/v1beta1/testdata/set-arrayremove-noarray.textproto
deleted file mode 100644
index e7aa209ea2..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayremove-noarray.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayRemove must be the value of a field. Firestore transforms don't support
-# array indexing.
-
-description: "set: ArrayRemove cannot be in an array value"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayremove-with-st.textproto b/tests/unit/v1beta1/testdata/set-arrayremove-with-st.textproto
deleted file mode 100644
index 353025b59f..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayremove-with-st.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. It may not appear in
-# an ArrayUnion.
-
-description: "set: The ServerTimestamp sentinel cannot be in an ArrayUnion"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [\"ArrayRemove\", 1, \"ServerTimestamp\", 3]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayremove.textproto b/tests/unit/v1beta1/testdata/set-arrayremove.textproto
deleted file mode 100644
index 8aa6b60d01..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayremove.textproto
+++ /dev/null
@@ -1,44 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with ArrayRemove is removed from the data in the update operation. Instead
-# it appears in a separate Transform operation.
-
-description: "set: ArrayRemove with data"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayunion-multi.textproto b/tests/unit/v1beta1/testdata/set-arrayunion-multi.textproto
deleted file mode 100644
index e515bfa8d1..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayunion-multi.textproto
+++ /dev/null
@@ -1,58 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ArrayUnion field. Since all the ArrayUnion
-# fields are removed, the only field in the update is "a".
-
-description: "set: multiple ArrayUnion fields"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3], \"c\": {\"d\": [\"ArrayUnion\", 4, 5, 6]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "c.d"
- append_missing_elements: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayunion-nested.textproto b/tests/unit/v1beta1/testdata/set-arrayunion-nested.textproto
deleted file mode 100644
index f8abeb0d00..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayunion-nested.textproto
+++ /dev/null
@@ -1,45 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An ArrayUnion value can occur at any depth. In this case, the transform applies
-# to the field path "b.c". Since "c" is removed from the update, "b" becomes
-# empty, so it is also removed from the update.
-
-description: "set: nested ArrayUnion field"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayUnion\", 1, 2, 3]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayunion-noarray-nested.textproto b/tests/unit/v1beta1/testdata/set-arrayunion-noarray-nested.textproto
deleted file mode 100644
index 2b4170f431..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayunion-noarray-nested.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ArrayUnion. Firestore transforms don't support array indexing.
-
-description: "set: ArrayUnion cannot be anywhere inside an array value"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayunion-noarray.textproto b/tests/unit/v1beta1/testdata/set-arrayunion-noarray.textproto
deleted file mode 100644
index e08af3a07f..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayunion-noarray.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayUnion must be the value of a field. Firestore transforms don't support
-# array indexing.
-
-description: "set: ArrayUnion cannot be in an array value"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayunion-with-st.textproto b/tests/unit/v1beta1/testdata/set-arrayunion-with-st.textproto
deleted file mode 100644
index 37a7a132e7..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayunion-with-st.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. It may not appear in
-# an ArrayUnion.
-
-description: "set: The ServerTimestamp sentinel cannot be in an ArrayUnion"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [\"ArrayUnion\", 1, \"ServerTimestamp\", 3]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-arrayunion.textproto b/tests/unit/v1beta1/testdata/set-arrayunion.textproto
deleted file mode 100644
index 4751e0c0e3..0000000000
--- a/tests/unit/v1beta1/testdata/set-arrayunion.textproto
+++ /dev/null
@@ -1,44 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with ArrayUnion is removed from the data in the update operation. Instead
-# it appears in a separate Transform operation.
-
-description: "set: ArrayUnion with data"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-basic.textproto b/tests/unit/v1beta1/testdata/set-basic.textproto
deleted file mode 100644
index e9b292e3cd..0000000000
--- a/tests/unit/v1beta1/testdata/set-basic.textproto
+++ /dev/null
@@ -1,24 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A simple call, resulting in a single update operation.
-
-description: "set: basic"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-complex.textproto b/tests/unit/v1beta1/testdata/set-complex.textproto
deleted file mode 100644
index 6ec19500a2..0000000000
--- a/tests/unit/v1beta1/testdata/set-complex.textproto
+++ /dev/null
@@ -1,58 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A call to a write method with complicated input data.
-
-description: "set: complex"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- array_value: <
- values: <
- integer_value: 1
- >
- values: <
- double_value: 2.5
- >
- >
- >
- >
- fields: <
- key: "b"
- value: <
- map_value: <
- fields: <
- key: "c"
- value: <
- array_value: <
- values: <
- string_value: "three"
- >
- values: <
- map_value: <
- fields: <
- key: "d"
- value: <
- boolean_value: true
- >
- >
- >
- >
- >
- >
- >
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-del-merge-alone.textproto b/tests/unit/v1beta1/testdata/set-del-merge-alone.textproto
deleted file mode 100644
index 811ab8dfe7..0000000000
--- a/tests/unit/v1beta1/testdata/set-del-merge-alone.textproto
+++ /dev/null
@@ -1,28 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A Delete sentinel can appear with a merge option. If the delete paths are the
-# only ones to be merged, then no document is sent, just an update mask.
-
-description: "set-merge: Delete with merge"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "b"
- field: "c"
- >
- >
- json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- >
- update_mask: <
- field_paths: "b.c"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-del-merge.textproto b/tests/unit/v1beta1/testdata/set-del-merge.textproto
deleted file mode 100644
index b8d8631051..0000000000
--- a/tests/unit/v1beta1/testdata/set-del-merge.textproto
+++ /dev/null
@@ -1,37 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A Delete sentinel can appear with a merge option.
-
-description: "set-merge: Delete with merge"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "a"
- >
- fields: <
- field: "b"
- field: "c"
- >
- >
- json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b.c"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-del-mergeall.textproto b/tests/unit/v1beta1/testdata/set-del-mergeall.textproto
deleted file mode 100644
index af1e84524b..0000000000
--- a/tests/unit/v1beta1/testdata/set-del-mergeall.textproto
+++ /dev/null
@@ -1,31 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A Delete sentinel can appear with a mergeAll option.
-
-description: "set: Delete with MergeAll"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- all: true
- >
- json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b.c"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-del-noarray-nested.textproto b/tests/unit/v1beta1/testdata/set-del-noarray-nested.textproto
deleted file mode 100644
index bbf6a3d00a..0000000000
--- a/tests/unit/v1beta1/testdata/set-del-noarray-nested.textproto
+++ /dev/null
@@ -1,13 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel must be the value of a field. Deletes are implemented by
-# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not
-# support array indexing.
-
-description: "set: Delete cannot be anywhere inside an array value"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-del-noarray.textproto b/tests/unit/v1beta1/testdata/set-del-noarray.textproto
deleted file mode 100644
index 07fc6497dc..0000000000
--- a/tests/unit/v1beta1/testdata/set-del-noarray.textproto
+++ /dev/null
@@ -1,13 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel must be the value of a field. Deletes are implemented by
-# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not
-# support array indexing.
-
-description: "set: Delete cannot be in an array value"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, \"Delete\"]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-del-nomerge.textproto b/tests/unit/v1beta1/testdata/set-del-nomerge.textproto
deleted file mode 100644
index cb6ef4f858..0000000000
--- a/tests/unit/v1beta1/testdata/set-del-nomerge.textproto
+++ /dev/null
@@ -1,17 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The client signals an error if the Delete sentinel is in the input data, but not
-# selected by a merge option, because this is most likely a programming bug.
-
-description: "set-merge: Delete cannot appear in an unmerged field"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "a"
- >
- >
- json_data: "{\"a\": 1, \"b\": \"Delete\"}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-del-nonleaf.textproto b/tests/unit/v1beta1/testdata/set-del-nonleaf.textproto
deleted file mode 100644
index 54f22d95c5..0000000000
--- a/tests/unit/v1beta1/testdata/set-del-nonleaf.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If a Delete is part of the value at a merge path, then the user is confused:
-# their merge path says "replace this entire value" but their Delete says "delete
-# this part of the value". This should be an error, just as if they specified
-# Delete in a Set with no merge.
-
-description: "set-merge: Delete cannot appear as part of a merge path"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "h"
- >
- >
- json_data: "{\"h\": {\"g\": \"Delete\"}}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-del-wo-merge.textproto b/tests/unit/v1beta1/testdata/set-del-wo-merge.textproto
deleted file mode 100644
index 29196628bf..0000000000
--- a/tests/unit/v1beta1/testdata/set-del-wo-merge.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Without a merge option, Set replaces the document with the input data. A Delete
-# sentinel in the data makes no sense in this case.
-
-description: "set: Delete cannot appear unless a merge option is specified"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"Delete\"}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-empty.textproto b/tests/unit/v1beta1/testdata/set-empty.textproto
deleted file mode 100644
index c2b73d3ff9..0000000000
--- a/tests/unit/v1beta1/testdata/set-empty.textproto
+++ /dev/null
@@ -1,17 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-
-description: "set: creating or setting an empty map"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-merge-fp.textproto b/tests/unit/v1beta1/testdata/set-merge-fp.textproto
deleted file mode 100644
index 68690f6f16..0000000000
--- a/tests/unit/v1beta1/testdata/set-merge-fp.textproto
+++ /dev/null
@@ -1,40 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A merge with fields that use special characters.
-
-description: "set-merge: Merge with FieldPaths"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "*"
- field: "~"
- >
- >
- json_data: "{\"*\": {\"~\": true}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "*"
- value: <
- map_value: <
- fields: <
- key: "~"
- value: <
- boolean_value: true
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "`*`.`~`"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-merge-nested.textproto b/tests/unit/v1beta1/testdata/set-merge-nested.textproto
deleted file mode 100644
index 0d1282818d..0000000000
--- a/tests/unit/v1beta1/testdata/set-merge-nested.textproto
+++ /dev/null
@@ -1,41 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A merge option where the field is not at top level. Only fields mentioned in the
-# option are present in the update operation.
-
-description: "set-merge: Merge with a nested field"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "h"
- field: "g"
- >
- >
- json_data: "{\"h\": {\"g\": 4, \"f\": 5}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "h"
- value: <
- map_value: <
- fields: <
- key: "g"
- value: <
- integer_value: 4
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "h.g"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-merge-nonleaf.textproto b/tests/unit/v1beta1/testdata/set-merge-nonleaf.textproto
deleted file mode 100644
index ca41cb0340..0000000000
--- a/tests/unit/v1beta1/testdata/set-merge-nonleaf.textproto
+++ /dev/null
@@ -1,46 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If a field path is in a merge option, the value at that path replaces the stored
-# value. That is true even if the value is complex.
-
-description: "set-merge: Merge field is not a leaf"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "h"
- >
- >
- json_data: "{\"h\": {\"f\": 5, \"g\": 6}, \"e\": 7}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "h"
- value: <
- map_value: <
- fields: <
- key: "f"
- value: <
- integer_value: 5
- >
- >
- fields: <
- key: "g"
- value: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "h"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-merge-prefix.textproto b/tests/unit/v1beta1/testdata/set-merge-prefix.textproto
deleted file mode 100644
index 1e2c2c5022..0000000000
--- a/tests/unit/v1beta1/testdata/set-merge-prefix.textproto
+++ /dev/null
@@ -1,21 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The prefix would make the other path meaningless, so this is probably a
-# programming error.
-
-description: "set-merge: One merge path cannot be the prefix of another"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "a"
- >
- fields: <
- field: "a"
- field: "b"
- >
- >
- json_data: "{\"a\": {\"b\": 1}}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-merge-present.textproto b/tests/unit/v1beta1/testdata/set-merge-present.textproto
deleted file mode 100644
index f6665de5cd..0000000000
--- a/tests/unit/v1beta1/testdata/set-merge-present.textproto
+++ /dev/null
@@ -1,20 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The client signals an error if a merge option mentions a path that is not in the
-# input data.
-
-description: "set-merge: Merge fields must all be present in data"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "b"
- >
- fields: <
- field: "a"
- >
- >
- json_data: "{\"a\": 1}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-merge.textproto b/tests/unit/v1beta1/testdata/set-merge.textproto
deleted file mode 100644
index 279125253c..0000000000
--- a/tests/unit/v1beta1/testdata/set-merge.textproto
+++ /dev/null
@@ -1,32 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Fields in the input data but not in a merge option are pruned.
-
-description: "set-merge: Merge with a field"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "a"
- >
- >
- json_data: "{\"a\": 1, \"b\": 2}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-mergeall-empty.textproto b/tests/unit/v1beta1/testdata/set-mergeall-empty.textproto
deleted file mode 100644
index 16df8a22be..0000000000
--- a/tests/unit/v1beta1/testdata/set-mergeall-empty.textproto
+++ /dev/null
@@ -1,23 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# This is a valid call that can be used to ensure a document exists.
-
-description: "set: MergeAll can be specified with empty data."
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- all: true
- >
- json_data: "{}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- >
- update_mask: <
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-mergeall-nested.textproto b/tests/unit/v1beta1/testdata/set-mergeall-nested.textproto
deleted file mode 100644
index 1fbc6973cd..0000000000
--- a/tests/unit/v1beta1/testdata/set-mergeall-nested.textproto
+++ /dev/null
@@ -1,45 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# MergeAll with nested fields results in an update mask that includes entries for
-# all the leaf fields.
-
-description: "set: MergeAll with nested fields"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- all: true
- >
- json_data: "{\"h\": { \"g\": 3, \"f\": 4 }}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "h"
- value: <
- map_value: <
- fields: <
- key: "f"
- value: <
- integer_value: 4
- >
- >
- fields: <
- key: "g"
- value: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "h.f"
- field_paths: "h.g"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-mergeall.textproto b/tests/unit/v1beta1/testdata/set-mergeall.textproto
deleted file mode 100644
index cb2ebc52bc..0000000000
--- a/tests/unit/v1beta1/testdata/set-mergeall.textproto
+++ /dev/null
@@ -1,37 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The MergeAll option with a simple piece of data.
-
-description: "set: MergeAll"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- all: true
- >
- json_data: "{\"a\": 1, \"b\": 2}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- fields: <
- key: "b"
- value: <
- integer_value: 2
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-nodel.textproto b/tests/unit/v1beta1/testdata/set-nodel.textproto
deleted file mode 100644
index 0fb887d461..0000000000
--- a/tests/unit/v1beta1/testdata/set-nodel.textproto
+++ /dev/null
@@ -1,11 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel cannot be used in Create, or in Set without a Merge option.
-
-description: "set: Delete cannot appear in data"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"Delete\"}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-nosplit.textproto b/tests/unit/v1beta1/testdata/set-nosplit.textproto
deleted file mode 100644
index 0ff3fadcf4..0000000000
--- a/tests/unit/v1beta1/testdata/set-nosplit.textproto
+++ /dev/null
@@ -1,37 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Create and Set treat their map keys literally. They do not split on dots.
-
-description: "set: don\342\200\231t split on dots"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{ \"a.b\": { \"c.d\": 1 }, \"e\": 2 }"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a.b"
- value: <
- map_value: <
- fields: <
- key: "c.d"
- value: <
- integer_value: 1
- >
- >
- >
- >
- >
- fields: <
- key: "e"
- value: <
- integer_value: 2
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-special-chars.textproto b/tests/unit/v1beta1/testdata/set-special-chars.textproto
deleted file mode 100644
index f4122c9f00..0000000000
--- a/tests/unit/v1beta1/testdata/set-special-chars.textproto
+++ /dev/null
@@ -1,38 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Create and Set treat their map keys literally. They do not escape special
-# characters.
-
-description: "set: non-alpha characters in map keys"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{ \"*\": { \".\": 1 }, \"~\": 2 }"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "*"
- value: <
- map_value: <
- fields: <
- key: "."
- value: <
- integer_value: 1
- >
- >
- >
- >
- >
- fields: <
- key: "~"
- value: <
- integer_value: 2
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-alone-mergeall.textproto b/tests/unit/v1beta1/testdata/set-st-alone-mergeall.textproto
deleted file mode 100644
index 16ce4cfbd9..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-alone-mergeall.textproto
+++ /dev/null
@@ -1,26 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the only values in the input are ServerTimestamps, then no update operation
-# should be produced.
-
-description: "set: ServerTimestamp alone with MergeAll"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- all: true
- >
- json_data: "{\"a\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-alone.textproto b/tests/unit/v1beta1/testdata/set-st-alone.textproto
deleted file mode 100644
index 6ce46d7f1a..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-alone.textproto
+++ /dev/null
@@ -1,28 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the only values in the input are ServerTimestamps, then an update operation
-# with an empty map should be produced.
-
-description: "set: ServerTimestamp alone"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-merge-both.textproto b/tests/unit/v1beta1/testdata/set-st-merge-both.textproto
deleted file mode 100644
index 5cc7bbc9ef..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-merge-both.textproto
+++ /dev/null
@@ -1,45 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Just as when no merge option is specified, ServerTimestamp sentinel values are
-# removed from the data in the update operation and become transforms.
-
-description: "set-merge: ServerTimestamp with Merge of both fields"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "a"
- >
- fields: <
- field: "b"
- >
- >
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-merge-nonleaf-alone.textproto b/tests/unit/v1beta1/testdata/set-st-merge-nonleaf-alone.textproto
deleted file mode 100644
index f513b6c804..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-merge-nonleaf-alone.textproto
+++ /dev/null
@@ -1,37 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If a field path is in a merge option, the value at that path replaces the stored
-# value. If the value has only ServerTimestamps, they become transforms and we
-# clear the value by including the field path in the update mask.
-
-description: "set-merge: non-leaf merge field with ServerTimestamp alone"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "h"
- >
- >
- json_data: "{\"h\": {\"g\": \"ServerTimestamp\"}, \"e\": 7}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- >
- update_mask: <
- field_paths: "h"
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "h.g"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-merge-nonleaf.textproto b/tests/unit/v1beta1/testdata/set-st-merge-nonleaf.textproto
deleted file mode 100644
index e53e7e2682..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-merge-nonleaf.textproto
+++ /dev/null
@@ -1,49 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If a field path is in a merge option, the value at that path replaces the stored
-# value, and ServerTimestamps inside that value become transforms as usual.
-
-description: "set-merge: non-leaf merge field with ServerTimestamp"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "h"
- >
- >
- json_data: "{\"h\": {\"f\": 5, \"g\": \"ServerTimestamp\"}, \"e\": 7}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "h"
- value: <
- map_value: <
- fields: <
- key: "f"
- value: <
- integer_value: 5
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "h"
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "h.g"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-merge-nowrite.textproto b/tests/unit/v1beta1/testdata/set-st-merge-nowrite.textproto
deleted file mode 100644
index 3222230dc5..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-merge-nowrite.textproto
+++ /dev/null
@@ -1,28 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If all the fields in the merge option have ServerTimestamp values, then no
-# update operation is produced, only a transform.
-
-description: "set-merge: If no ordinary values in Merge, no write"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "b"
- >
- >
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-mergeall.textproto b/tests/unit/v1beta1/testdata/set-st-mergeall.textproto
deleted file mode 100644
index b8c53a566f..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-mergeall.textproto
+++ /dev/null
@@ -1,40 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Just as when no merge option is specified, ServerTimestamp sentinel values are
-# removed from the data in the update operation and become transforms.
-
-description: "set: ServerTimestamp with MergeAll"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- all: true
- >
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-multi.textproto b/tests/unit/v1beta1/testdata/set-st-multi.textproto
deleted file mode 100644
index 375ec18d68..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-multi.textproto
+++ /dev/null
@@ -1,38 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ServerTimestamp field. Since all the
-# ServerTimestamp fields are removed, the only field in the update is "a".
-
-description: "set: multiple ServerTimestamp fields"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- field_transforms: <
- field_path: "c.d"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-nested.textproto b/tests/unit/v1beta1/testdata/set-st-nested.textproto
deleted file mode 100644
index abfd2e8fd8..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-nested.textproto
+++ /dev/null
@@ -1,35 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A ServerTimestamp value can occur at any depth. In this case, the transform
-# applies to the field path "b.c". Since "c" is removed from the update, "b"
-# becomes empty, so it is also removed from the update.
-
-description: "set: nested ServerTimestamp field"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-noarray-nested.textproto b/tests/unit/v1beta1/testdata/set-st-noarray-nested.textproto
deleted file mode 100644
index 241d79151a..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-noarray-nested.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ServerTimestamp sentinel. Firestore transforms don't support array indexing.
-
-description: "set: ServerTimestamp cannot be anywhere inside an array value"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-st-noarray.textproto b/tests/unit/v1beta1/testdata/set-st-noarray.textproto
deleted file mode 100644
index 591fb03438..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-noarray.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. Firestore transforms
-# don't support array indexing.
-
-description: "set: ServerTimestamp cannot be in an array value"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/set-st-nomerge.textproto b/tests/unit/v1beta1/testdata/set-st-nomerge.textproto
deleted file mode 100644
index 20c0ae1fbb..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-nomerge.textproto
+++ /dev/null
@@ -1,33 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the ServerTimestamp value is not mentioned in a merge option, then it is
-# pruned from the data but does not result in a transform.
-
-description: "set-merge: If is ServerTimestamp not in Merge, no transform"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- option: <
- fields: <
- field: "a"
- >
- >
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st-with-empty-map.textproto b/tests/unit/v1beta1/testdata/set-st-with-empty-map.textproto
deleted file mode 100644
index 5e187983f9..0000000000
--- a/tests/unit/v1beta1/testdata/set-st-with-empty-map.textproto
+++ /dev/null
@@ -1,42 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp
-# should be stripped out but the empty map should remain.
-
-description: "set: ServerTimestamp beside an empty map"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": {\"b\": {}, \"c\": \"ServerTimestamp\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- map_value: <
- fields: <
- key: "b"
- value: <
- map_value: <
- >
- >
- >
- >
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a.c"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/set-st.textproto b/tests/unit/v1beta1/testdata/set-st.textproto
deleted file mode 100644
index 8bceddceea..0000000000
--- a/tests/unit/v1beta1/testdata/set-st.textproto
+++ /dev/null
@@ -1,36 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with the special ServerTimestamp sentinel is removed from the data in the
-# update operation. Instead it appears in a separate Transform operation. Note
-# that in these tests, the string "ServerTimestamp" should be replaced with the
-# special ServerTimestamp value.
-
-description: "set: ServerTimestamp with data"
-set: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/test-suite.binproto b/tests/unit/v1beta1/testdata/test-suite.binproto
deleted file mode 100644
index 6e3ce39737..0000000000
Binary files a/tests/unit/v1beta1/testdata/test-suite.binproto and /dev/null differ
diff --git a/tests/unit/v1beta1/testdata/update-all-transforms.textproto b/tests/unit/v1beta1/testdata/update-all-transforms.textproto
deleted file mode 100644
index 225cc61e40..0000000000
--- a/tests/unit/v1beta1/testdata/update-all-transforms.textproto
+++ /dev/null
@@ -1,67 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can be created with any amount of transforms.
-
-description: "update: all transforms in a single call"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": [\"ArrayUnion\", 1, 2, 3], \"d\": [\"ArrayRemove\", 4, 5, 6]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- field_transforms: <
- field_path: "c"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "d"
- remove_all_from_array: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-alone.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-alone.textproto
deleted file mode 100644
index 8c79a31d50..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayremove-alone.textproto
+++ /dev/null
@@ -1,36 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the only values in the input are ArrayRemove, then no update operation should
-# be produced.
-
-description: "update: ArrayRemove alone"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [\"ArrayRemove\", 1, 2, 3]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-multi.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-multi.textproto
deleted file mode 100644
index 2362b6e094..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayremove-multi.textproto
+++ /dev/null
@@ -1,69 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ArrayRemove field. Since all the ArrayRemove
-# fields are removed, the only field in the update is "a".
-
-# b is not in the mask because it will be set in the transform. c must be in the
-# mask: it should be replaced entirely. The transform will set c.d to the
-# timestamp, but the update will delete the rest of c.
-
-description: "update: multiple ArrayRemove fields"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3], \"c\": {\"d\": [\"ArrayRemove\", 4, 5, 6]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "c"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "c.d"
- remove_all_from_array: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-nested.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-nested.textproto
deleted file mode 100644
index 143790179e..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayremove-nested.textproto
+++ /dev/null
@@ -1,52 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An ArrayRemove value can occur at any depth. In this case, the transform applies
-# to the field path "b.c". Since "c" is removed from the update, "b" becomes
-# empty, so it is also removed from the update.
-
-description: "update: nested ArrayRemove field"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayRemove\", 1, 2, 3]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-noarray-nested.textproto
deleted file mode 100644
index 04eca965c6..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayremove-noarray-nested.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ArrayRemove. Firestore transforms don't support array indexing.
-
-description: "update: ArrayRemove cannot be anywhere inside an array value"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-noarray.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-noarray.textproto
deleted file mode 100644
index bbd27bf017..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayremove-noarray.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayRemove must be the value of a field. Firestore transforms don't support
-# array indexing.
-
-description: "update: ArrayRemove cannot be in an array value"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-with-st.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-with-st.textproto
deleted file mode 100644
index 4888b44f1c..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayremove-with-st.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. It may not appear in
-# an ArrayUnion.
-
-description: "update: The ServerTimestamp sentinel cannot be in an ArrayUnion"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [\"ArrayRemove\", 1, \"ServerTimestamp\", 3]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayremove.textproto b/tests/unit/v1beta1/testdata/update-arrayremove.textproto
deleted file mode 100644
index 3b767cf486..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayremove.textproto
+++ /dev/null
@@ -1,50 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with ArrayRemove is removed from the data in the update operation. Instead
-# it appears in a separate Transform operation.
-
-description: "update: ArrayRemove with data"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-alone.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-alone.textproto
deleted file mode 100644
index ec12818da7..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayunion-alone.textproto
+++ /dev/null
@@ -1,36 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the only values in the input are ArrayUnion, then no update operation should
-# be produced.
-
-description: "update: ArrayUnion alone"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [\"ArrayUnion\", 1, 2, 3]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-multi.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-multi.textproto
deleted file mode 100644
index 8edf6a3af0..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayunion-multi.textproto
+++ /dev/null
@@ -1,69 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ArrayUnion field. Since all the ArrayUnion
-# fields are removed, the only field in the update is "a".
-
-# b is not in the mask because it will be set in the transform. c must be in the
-# mask: it should be replaced entirely. The transform will set c.d to the
-# timestamp, but the update will delete the rest of c.
-
-description: "update: multiple ArrayUnion fields"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3], \"c\": {\"d\": [\"ArrayUnion\", 4, 5, 6]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "c"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "c.d"
- append_missing_elements: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-nested.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-nested.textproto
deleted file mode 100644
index 217e2e2ca7..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayunion-nested.textproto
+++ /dev/null
@@ -1,52 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An ArrayUnion value can occur at any depth. In this case, the transform applies
-# to the field path "b.c". Since "c" is removed from the update, "b" becomes
-# empty, so it is also removed from the update.
-
-description: "update: nested ArrayUnion field"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayUnion\", 1, 2, 3]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-noarray-nested.textproto
deleted file mode 100644
index 0326781830..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayunion-noarray-nested.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ArrayUnion. Firestore transforms don't support array indexing.
-
-description: "update: ArrayUnion cannot be anywhere inside an array value"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-noarray.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-noarray.textproto
deleted file mode 100644
index c199f9f73c..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayunion-noarray.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayUnion must be the value of a field. Firestore transforms don't support
-# array indexing.
-
-description: "update: ArrayUnion cannot be in an array value"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-with-st.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-with-st.textproto
deleted file mode 100644
index ee022f8492..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayunion-with-st.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. It may not appear in
-# an ArrayUnion.
-
-description: "update: The ServerTimestamp sentinel cannot be in an ArrayUnion"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [\"ArrayUnion\", 1, \"ServerTimestamp\", 3]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-arrayunion.textproto b/tests/unit/v1beta1/testdata/update-arrayunion.textproto
deleted file mode 100644
index 81b240b891..0000000000
--- a/tests/unit/v1beta1/testdata/update-arrayunion.textproto
+++ /dev/null
@@ -1,50 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with ArrayUnion is removed from the data in the update operation. Instead
-# it appears in a separate Transform operation.
-
-description: "update: ArrayUnion with data"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-badchar.textproto b/tests/unit/v1beta1/testdata/update-badchar.textproto
deleted file mode 100644
index 656ff53b68..0000000000
--- a/tests/unit/v1beta1/testdata/update-badchar.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The keys of the data given to Update are interpreted, unlike those of Create and
-# Set. They cannot contain special characters.
-
-description: "update: invalid character"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a~b\": 1}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-basic.textproto b/tests/unit/v1beta1/testdata/update-basic.textproto
deleted file mode 100644
index 9da316f58e..0000000000
--- a/tests/unit/v1beta1/testdata/update-basic.textproto
+++ /dev/null
@@ -1,30 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A simple call, resulting in a single update operation.
-
-description: "update: basic"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-complex.textproto b/tests/unit/v1beta1/testdata/update-complex.textproto
deleted file mode 100644
index 1a6d9eff64..0000000000
--- a/tests/unit/v1beta1/testdata/update-complex.textproto
+++ /dev/null
@@ -1,65 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A call to a write method with complicated input data.
-
-description: "update: complex"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- array_value: <
- values: <
- integer_value: 1
- >
- values: <
- double_value: 2.5
- >
- >
- >
- >
- fields: <
- key: "b"
- value: <
- map_value: <
- fields: <
- key: "c"
- value: <
- array_value: <
- values: <
- string_value: "three"
- >
- values: <
- map_value: <
- fields: <
- key: "d"
- value: <
- boolean_value: true
- >
- >
- >
- >
- >
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-del-alone.textproto b/tests/unit/v1beta1/testdata/update-del-alone.textproto
deleted file mode 100644
index 8f558233f0..0000000000
--- a/tests/unit/v1beta1/testdata/update-del-alone.textproto
+++ /dev/null
@@ -1,25 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the input data consists solely of Deletes, then the update operation has no
-# map, just an update mask.
-
-description: "update: Delete alone"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": \"Delete\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-del-dot.textproto b/tests/unit/v1beta1/testdata/update-del-dot.textproto
deleted file mode 100644
index c0ebdf61f7..0000000000
--- a/tests/unit/v1beta1/testdata/update-del-dot.textproto
+++ /dev/null
@@ -1,46 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# After expanding top-level dotted fields, fields with Delete values are pruned
-# from the output data, but appear in the update mask.
-
-description: "update: Delete with a dotted field"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b.c\": \"Delete\", \"b.d\": 2}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- fields: <
- key: "b"
- value: <
- map_value: <
- fields: <
- key: "d"
- value: <
- integer_value: 2
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b.c"
- field_paths: "b.d"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-del-nested.textproto b/tests/unit/v1beta1/testdata/update-del-nested.textproto
deleted file mode 100644
index ed102697e6..0000000000
--- a/tests/unit/v1beta1/testdata/update-del-nested.textproto
+++ /dev/null
@@ -1,11 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel must be the value of a top-level key.
-
-description: "update: Delete cannot be nested"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": {\"b\": \"Delete\"}}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-del-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-del-noarray-nested.textproto
deleted file mode 100644
index a2eec49661..0000000000
--- a/tests/unit/v1beta1/testdata/update-del-noarray-nested.textproto
+++ /dev/null
@@ -1,13 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel must be the value of a field. Deletes are implemented by
-# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not
-# support array indexing.
-
-description: "update: Delete cannot be anywhere inside an array value"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-del-noarray.textproto b/tests/unit/v1beta1/testdata/update-del-noarray.textproto
deleted file mode 100644
index a7eea87ef4..0000000000
--- a/tests/unit/v1beta1/testdata/update-del-noarray.textproto
+++ /dev/null
@@ -1,13 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel must be the value of a field. Deletes are implemented by
-# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not
-# support array indexing.
-
-description: "update: Delete cannot be in an array value"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, \"Delete\"]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-del.textproto b/tests/unit/v1beta1/testdata/update-del.textproto
deleted file mode 100644
index ec443e6c70..0000000000
--- a/tests/unit/v1beta1/testdata/update-del.textproto
+++ /dev/null
@@ -1,32 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If a field's value is the Delete sentinel, then it doesn't appear in the update
-# data, but does in the mask.
-
-description: "update: Delete"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"Delete\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-exists-precond.textproto b/tests/unit/v1beta1/testdata/update-exists-precond.textproto
deleted file mode 100644
index 3c6fef4e22..0000000000
--- a/tests/unit/v1beta1/testdata/update-exists-precond.textproto
+++ /dev/null
@@ -1,14 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Update method does not support an explicit exists precondition.
-
-description: "update: Exists precondition is invalid"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- precondition: <
- exists: true
- >
- json_data: "{\"a\": 1}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-fp-empty-component.textproto b/tests/unit/v1beta1/testdata/update-fp-empty-component.textproto
deleted file mode 100644
index c3bceff3e4..0000000000
--- a/tests/unit/v1beta1/testdata/update-fp-empty-component.textproto
+++ /dev/null
@@ -1,11 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Empty fields are not allowed.
-
-description: "update: empty field path component"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a..b\": 1}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-nested-transform-and-nested-value.textproto b/tests/unit/v1beta1/testdata/update-nested-transform-and-nested-value.textproto
deleted file mode 100644
index d2cee270d5..0000000000
--- a/tests/unit/v1beta1/testdata/update-nested-transform-and-nested-value.textproto
+++ /dev/null
@@ -1,58 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# For updates, top-level paths in json-like map inputs are split on the dot. That
-# is, an input {"a.b.c": 7} results in an update to field c of object b of object
-# a with value 7. In order to specify this behavior, the update must use a
-# fieldmask "a.b.c". However, fieldmasks are only used for concrete values -
-# transforms are separately encoded in a DocumentTransform_FieldTransform array.
-
-# This test exercises a bug found in python
-# (https://github.com/googleapis/google-cloud-python/issues/7215) in which nested
-# transforms ({"a.c": "ServerTimestamp"}) next to nested values ({"a.b": 7})
-# incorrectly caused the fieldmask "a" to be set, which has the effect of wiping
-# out all data in "a" other than what was specified in the json-like input.
-
-# Instead, as this test specifies, transforms should not affect the fieldmask.
-
-description: "update: Nested transforms should not affect the field mask, even\nwhen there are other values that do. Transforms should only affect the\nDocumentTransform_FieldTransform list."
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a.b\": 7, \"a.c\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- map_value: <
- fields: <
- key: "b"
- value: <
- integer_value: 7
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "a.b"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a.c"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-no-paths.textproto b/tests/unit/v1beta1/testdata/update-no-paths.textproto
deleted file mode 100644
index b524b7483f..0000000000
--- a/tests/unit/v1beta1/testdata/update-no-paths.textproto
+++ /dev/null
@@ -1,11 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# It is a client-side error to call Update with empty data.
-
-description: "update: no paths"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-all-transforms.textproto b/tests/unit/v1beta1/testdata/update-paths-all-transforms.textproto
deleted file mode 100644
index 8cfad47320..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-all-transforms.textproto
+++ /dev/null
@@ -1,82 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can be created with any amount of transforms.
-
-description: "update-paths: all transforms in a single call"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- field_paths: <
- field: "c"
- >
- field_paths: <
- field: "d"
- >
- json_values: "1"
- json_values: "\"ServerTimestamp\""
- json_values: "[\"ArrayUnion\", 1, 2, 3]"
- json_values: "[\"ArrayRemove\", 4, 5, 6]"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- field_transforms: <
- field_path: "c"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "d"
- remove_all_from_array: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-alone.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-alone.textproto
deleted file mode 100644
index 68f0e147b2..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-alone.textproto
+++ /dev/null
@@ -1,39 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the only values in the input are ArrayRemove, then no update operation should
-# be produced.
-
-description: "update-paths: ArrayRemove alone"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[\"ArrayRemove\", 1, 2, 3]"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-multi.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-multi.textproto
deleted file mode 100644
index b60c3f36a6..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-multi.textproto
+++ /dev/null
@@ -1,76 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ArrayRemove field. Since all the ArrayRemove
-# fields are removed, the only field in the update is "a".
-
-description: "update-paths: multiple ArrayRemove fields"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- field_paths: <
- field: "c"
- >
- json_values: "1"
- json_values: "[\"ArrayRemove\", 1, 2, 3]"
- json_values: "{\"d\": [\"ArrayRemove\", 4, 5, 6]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "c"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "c.d"
- remove_all_from_array: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-nested.textproto
deleted file mode 100644
index 381be19d55..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-nested.textproto
+++ /dev/null
@@ -1,59 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An ArrayRemove value can occur at any depth. In this case, the transform applies
-# to the field path "b.c". Since "c" is removed from the update, "b" becomes
-# empty, so it is also removed from the update.
-
-description: "update-paths: nested ArrayRemove field"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- json_values: "1"
- json_values: "{\"c\": [\"ArrayRemove\", 1, 2, 3]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray-nested.textproto
deleted file mode 100644
index 35f6c67b2e..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray-nested.textproto
+++ /dev/null
@@ -1,15 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ArrayRemove. Firestore transforms don't support array indexing.
-
-description: "update-paths: ArrayRemove cannot be anywhere inside an array value"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray.textproto
deleted file mode 100644
index 45cab48dd9..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray.textproto
+++ /dev/null
@@ -1,15 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayRemove must be the value of a field. Firestore transforms don't support
-# array indexing.
-
-description: "update-paths: ArrayRemove cannot be in an array value"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[1, 2, [\"ArrayRemove\", 1, 2, 3]]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-with-st.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-with-st.textproto
deleted file mode 100644
index 67b92a3ef3..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-with-st.textproto
+++ /dev/null
@@ -1,15 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. It may not appear in
-# an ArrayUnion.
-
-description: "update-paths: The ServerTimestamp sentinel cannot be in an ArrayUnion"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[\"ArrayRemove\", 1, \"ServerTimestamp\", 3]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove.textproto
deleted file mode 100644
index d3866676ed..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayremove.textproto
+++ /dev/null
@@ -1,57 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with ArrayRemove is removed from the data in the update operation. Instead
-# it appears in a separate Transform operation.
-
-description: "update-paths: ArrayRemove with data"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- json_values: "1"
- json_values: "[\"ArrayRemove\", 1, 2, 3]"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- remove_all_from_array: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-alone.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-alone.textproto
deleted file mode 100644
index 48100e0abc..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-alone.textproto
+++ /dev/null
@@ -1,39 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the only values in the input are ArrayUnion, then no update operation should
-# be produced.
-
-description: "update-paths: ArrayUnion alone"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[\"ArrayUnion\", 1, 2, 3]"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-multi.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-multi.textproto
deleted file mode 100644
index 03772e5ddd..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-multi.textproto
+++ /dev/null
@@ -1,76 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ArrayUnion field. Since all the ArrayUnion
-# fields are removed, the only field in the update is "a".
-
-description: "update-paths: multiple ArrayUnion fields"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- field_paths: <
- field: "c"
- >
- json_values: "1"
- json_values: "[\"ArrayUnion\", 1, 2, 3]"
- json_values: "{\"d\": [\"ArrayUnion\", 4, 5, 6]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "c"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- field_transforms: <
- field_path: "c.d"
- append_missing_elements: <
- values: <
- integer_value: 4
- >
- values: <
- integer_value: 5
- >
- values: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-nested.textproto
deleted file mode 100644
index 1420e4e280..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-nested.textproto
+++ /dev/null
@@ -1,59 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# An ArrayUnion value can occur at any depth. In this case, the transform applies
-# to the field path "b.c". Since "c" is removed from the update, "b" becomes
-# empty, so it is also removed from the update.
-
-description: "update-paths: nested ArrayUnion field"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- json_values: "1"
- json_values: "{\"c\": [\"ArrayUnion\", 1, 2, 3]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray-nested.textproto
deleted file mode 100644
index ab75bf38a3..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray-nested.textproto
+++ /dev/null
@@ -1,15 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ArrayUnion. Firestore transforms don't support array indexing.
-
-description: "update-paths: ArrayUnion cannot be anywhere inside an array value"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray.textproto
deleted file mode 100644
index fac72644fc..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray.textproto
+++ /dev/null
@@ -1,15 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# ArrayUnion must be the value of a field. Firestore transforms don't support
-# array indexing.
-
-description: "update-paths: ArrayUnion cannot be in an array value"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[1, 2, [\"ArrayRemove\", 1, 2, 3]]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-with-st.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-with-st.textproto
deleted file mode 100644
index d194c09bd7..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-with-st.textproto
+++ /dev/null
@@ -1,15 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. It may not appear in
-# an ArrayUnion.
-
-description: "update-paths: The ServerTimestamp sentinel cannot be in an ArrayUnion"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[\"ArrayUnion\", 1, \"ServerTimestamp\", 3]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion.textproto
deleted file mode 100644
index fc56c1e294..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-arrayunion.textproto
+++ /dev/null
@@ -1,57 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with ArrayUnion is removed from the data in the update operation. Instead
-# it appears in a separate Transform operation.
-
-description: "update-paths: ArrayUnion with data"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- json_values: "1"
- json_values: "[\"ArrayUnion\", 1, 2, 3]"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- append_missing_elements: <
- values: <
- integer_value: 1
- >
- values: <
- integer_value: 2
- >
- values: <
- integer_value: 3
- >
- >
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-basic.textproto b/tests/unit/v1beta1/testdata/update-paths-basic.textproto
deleted file mode 100644
index 515f29d6af..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-basic.textproto
+++ /dev/null
@@ -1,33 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A simple call, resulting in a single update operation.
-
-description: "update-paths: basic"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "1"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-complex.textproto b/tests/unit/v1beta1/testdata/update-paths-complex.textproto
deleted file mode 100644
index 38a832239f..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-complex.textproto
+++ /dev/null
@@ -1,72 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A call to a write method with complicated input data.
-
-description: "update-paths: complex"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- json_values: "[1, 2.5]"
- json_values: "{\"c\": [\"three\", {\"d\": true}]}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- array_value: <
- values: <
- integer_value: 1
- >
- values: <
- double_value: 2.5
- >
- >
- >
- >
- fields: <
- key: "b"
- value: <
- map_value: <
- fields: <
- key: "c"
- value: <
- array_value: <
- values: <
- string_value: "three"
- >
- values: <
- map_value: <
- fields: <
- key: "d"
- value: <
- boolean_value: true
- >
- >
- >
- >
- >
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-del-alone.textproto b/tests/unit/v1beta1/testdata/update-paths-del-alone.textproto
deleted file mode 100644
index 5dbb787de9..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-del-alone.textproto
+++ /dev/null
@@ -1,28 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the input data consists solely of Deletes, then the update operation has no
-# map, just an update mask.
-
-description: "update-paths: Delete alone"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "\"Delete\""
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-del-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-del-nested.textproto
deleted file mode 100644
index bdf65fb0ad..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-del-nested.textproto
+++ /dev/null
@@ -1,14 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel must be the value of a top-level key.
-
-description: "update-paths: Delete cannot be nested"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "{\"b\": \"Delete\"}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-del-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-del-noarray-nested.textproto
deleted file mode 100644
index d3da15dda8..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-del-noarray-nested.textproto
+++ /dev/null
@@ -1,16 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel must be the value of a field. Deletes are implemented by
-# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not
-# support array indexing.
-
-description: "update-paths: Delete cannot be anywhere inside an array value"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[1, {\"b\": \"Delete\"}]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-del-noarray.textproto b/tests/unit/v1beta1/testdata/update-paths-del-noarray.textproto
deleted file mode 100644
index 9ebdd09451..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-del-noarray.textproto
+++ /dev/null
@@ -1,16 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Delete sentinel must be the value of a field. Deletes are implemented by
-# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not
-# support array indexing.
-
-description: "update-paths: Delete cannot be in an array value"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[1, 2, \"Delete\"]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-del.textproto b/tests/unit/v1beta1/testdata/update-paths-del.textproto
deleted file mode 100644
index 5197a78488..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-del.textproto
+++ /dev/null
@@ -1,39 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If a field's value is the Delete sentinel, then it doesn't appear in the update
-# data, but does in the mask.
-
-description: "update-paths: Delete"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- json_values: "1"
- json_values: "\"Delete\""
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-exists-precond.textproto b/tests/unit/v1beta1/testdata/update-paths-exists-precond.textproto
deleted file mode 100644
index 084e07726e..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-exists-precond.textproto
+++ /dev/null
@@ -1,17 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Update method does not support an explicit exists precondition.
-
-description: "update-paths: Exists precondition is invalid"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- precondition: <
- exists: true
- >
- field_paths: <
- field: "a"
- >
- json_values: "1"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-del.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-del.textproto
deleted file mode 100644
index 5c92aeb8ca..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-fp-del.textproto
+++ /dev/null
@@ -1,47 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If one nested field is deleted, and another isn't, preserve the second.
-
-description: "update-paths: field paths with delete"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "foo"
- field: "bar"
- >
- field_paths: <
- field: "foo"
- field: "delete"
- >
- json_values: "1"
- json_values: "\"Delete\""
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "foo"
- value: <
- map_value: <
- fields: <
- key: "bar"
- value: <
- integer_value: 1
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "foo.bar"
- field_paths: "foo.delete"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-dup-transforms.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-dup-transforms.textproto
deleted file mode 100644
index a84725a8d4..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-fp-dup-transforms.textproto
+++ /dev/null
@@ -1,23 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The same field cannot occur more than once, even if all the operations are
-# transforms.
-
-description: "update-paths: duplicate field path with only transforms"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- field_paths: <
- field: "a"
- >
- json_values: "[\"ArrayUnion\", 1, 2, 3]"
- json_values: "\"ServerTimestamp\""
- json_values: "[\"ArrayUnion\", 4, 5, 6]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-dup.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-dup.textproto
deleted file mode 100644
index fedbd3aab9..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-fp-dup.textproto
+++ /dev/null
@@ -1,22 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The same field cannot occur more than once.
-
-description: "update-paths: duplicate field path"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- field_paths: <
- field: "a"
- >
- json_values: "1"
- json_values: "2"
- json_values: "3"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-empty-component.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-empty-component.textproto
deleted file mode 100644
index 7a5df25b7e..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-fp-empty-component.textproto
+++ /dev/null
@@ -1,15 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Empty fields are not allowed.
-
-description: "update-paths: empty field path component"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "*"
- field: ""
- >
- json_values: "1"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-empty.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-empty.textproto
deleted file mode 100644
index 311e309326..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-fp-empty.textproto
+++ /dev/null
@@ -1,13 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A FieldPath of length zero is invalid.
-
-description: "update-paths: empty field path"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- >
- json_values: "1"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-multi.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-multi.textproto
deleted file mode 100644
index 9ba41e3981..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-fp-multi.textproto
+++ /dev/null
@@ -1,42 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The UpdatePaths or equivalent method takes a list of FieldPaths. Each FieldPath
-# is a sequence of uninterpreted path components.
-
-description: "update-paths: multiple-element field path"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- field: "b"
- >
- json_values: "1"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- map_value: <
- fields: <
- key: "b"
- value: <
- integer_value: 1
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "a.b"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-nosplit.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-nosplit.textproto
deleted file mode 100644
index 5164952667..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-fp-nosplit.textproto
+++ /dev/null
@@ -1,48 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# FieldPath components are not split on dots.
-
-description: "update-paths: FieldPath elements are not split on dots"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a.b"
- field: "f.g"
- >
- json_values: "{\"n.o\": 7}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a.b"
- value: <
- map_value: <
- fields: <
- key: "f.g"
- value: <
- map_value: <
- fields: <
- key: "n.o"
- value: <
- integer_value: 7
- >
- >
- >
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "`a.b`.`f.g`"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-no-paths.textproto b/tests/unit/v1beta1/testdata/update-paths-no-paths.textproto
deleted file mode 100644
index d9939dc947..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-no-paths.textproto
+++ /dev/null
@@ -1,10 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# It is a client-side error to call Update with empty data.
-
-description: "update-paths: no paths"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-prefix-1.textproto b/tests/unit/v1beta1/testdata/update-paths-prefix-1.textproto
deleted file mode 100644
index 1710b91097..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-prefix-1.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# In the input data, one field cannot be a prefix of another.
-
-description: "update-paths: prefix #1"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- field: "b"
- >
- field_paths: <
- field: "a"
- >
- json_values: "1"
- json_values: "2"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-prefix-2.textproto b/tests/unit/v1beta1/testdata/update-paths-prefix-2.textproto
deleted file mode 100644
index be78ab58a6..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-prefix-2.textproto
+++ /dev/null
@@ -1,19 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# In the input data, one field cannot be a prefix of another.
-
-description: "update-paths: prefix #2"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "a"
- field: "b"
- >
- json_values: "1"
- json_values: "2"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-prefix-3.textproto b/tests/unit/v1beta1/testdata/update-paths-prefix-3.textproto
deleted file mode 100644
index b8a84c9d1f..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-prefix-3.textproto
+++ /dev/null
@@ -1,20 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# In the input data, one field cannot be a prefix of another, even if the values
-# could in principle be combined.
-
-description: "update-paths: prefix #3"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "a"
- field: "d"
- >
- json_values: "{\"b\": 1}"
- json_values: "2"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-special-chars.textproto b/tests/unit/v1beta1/testdata/update-paths-special-chars.textproto
deleted file mode 100644
index 51cb33b312..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-special-chars.textproto
+++ /dev/null
@@ -1,53 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# FieldPaths can contain special characters.
-
-description: "update-paths: special characters"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "*"
- field: "~"
- >
- field_paths: <
- field: "*"
- field: "`"
- >
- json_values: "1"
- json_values: "2"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "*"
- value: <
- map_value: <
- fields: <
- key: "`"
- value: <
- integer_value: 2
- >
- >
- fields: <
- key: "~"
- value: <
- integer_value: 1
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "`*`.`\\``"
- field_paths: "`*`.`~`"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-st-alone.textproto b/tests/unit/v1beta1/testdata/update-paths-st-alone.textproto
deleted file mode 100644
index abc44f55b4..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-st-alone.textproto
+++ /dev/null
@@ -1,29 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the only values in the input are ServerTimestamps, then no update operation
-# should be produced.
-
-description: "update-paths: ServerTimestamp alone"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "\"ServerTimestamp\""
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a"
- set_to_server_value: REQUEST_TIME
- >
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-st-multi.textproto b/tests/unit/v1beta1/testdata/update-paths-st-multi.textproto
deleted file mode 100644
index b0b7df17d8..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-st-multi.textproto
+++ /dev/null
@@ -1,56 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ServerTimestamp field. Since all the
-# ServerTimestamp fields are removed, the only field in the update is "a".
-
-description: "update-paths: multiple ServerTimestamp fields"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- field_paths: <
- field: "c"
- >
- json_values: "1"
- json_values: "\"ServerTimestamp\""
- json_values: "{\"d\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "c"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- field_transforms: <
- field_path: "c.d"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-st-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-st-nested.textproto
deleted file mode 100644
index 3077368318..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-st-nested.textproto
+++ /dev/null
@@ -1,49 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A ServerTimestamp value can occur at any depth. In this case, the transform
-# applies to the field path "b.c". Since "c" is removed from the update, "b"
-# becomes empty, so it is also removed from the update.
-
-description: "update-paths: nested ServerTimestamp field"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- json_values: "1"
- json_values: "{\"c\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-st-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-st-noarray-nested.textproto
deleted file mode 100644
index 2c2cb89b62..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-st-noarray-nested.textproto
+++ /dev/null
@@ -1,15 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ServerTimestamp sentinel. Firestore transforms don't support array indexing.
-
-description: "update-paths: ServerTimestamp cannot be anywhere inside an array value"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[1, {\"b\": \"ServerTimestamp\"}]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-st-noarray.textproto b/tests/unit/v1beta1/testdata/update-paths-st-noarray.textproto
deleted file mode 100644
index a2baa66f57..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-st-noarray.textproto
+++ /dev/null
@@ -1,15 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. Firestore transforms
-# don't support array indexing.
-
-description: "update-paths: ServerTimestamp cannot be in an array value"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "[1, 2, \"ServerTimestamp\"]"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-st-with-empty-map.textproto b/tests/unit/v1beta1/testdata/update-paths-st-with-empty-map.textproto
deleted file mode 100644
index a54a241565..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-st-with-empty-map.textproto
+++ /dev/null
@@ -1,51 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp
-# should be stripped out but the empty map should remain.
-
-description: "update-paths: ServerTimestamp beside an empty map"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- json_values: "{\"b\": {}, \"c\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- map_value: <
- fields: <
- key: "b"
- value: <
- map_value: <
- >
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a.c"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-st.textproto b/tests/unit/v1beta1/testdata/update-paths-st.textproto
deleted file mode 100644
index 40634c1658..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-st.textproto
+++ /dev/null
@@ -1,49 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with the special ServerTimestamp sentinel is removed from the data in the
-# update operation. Instead it appears in a separate Transform operation. Note
-# that in these tests, the string "ServerTimestamp" should be replaced with the
-# special ServerTimestamp value.
-
-description: "update-paths: ServerTimestamp with data"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- field_paths: <
- field: "a"
- >
- field_paths: <
- field: "b"
- >
- json_values: "1"
- json_values: "\"ServerTimestamp\""
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-paths-uptime.textproto b/tests/unit/v1beta1/testdata/update-paths-uptime.textproto
deleted file mode 100644
index 7a15874bea..0000000000
--- a/tests/unit/v1beta1/testdata/update-paths-uptime.textproto
+++ /dev/null
@@ -1,40 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Update call supports a last-update-time precondition.
-
-description: "update-paths: last-update-time precondition"
-update_paths: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- precondition: <
- update_time: <
- seconds: 42
- >
- >
- field_paths: <
- field: "a"
- >
- json_values: "1"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- update_time: <
- seconds: 42
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-prefix-1.textproto b/tests/unit/v1beta1/testdata/update-prefix-1.textproto
deleted file mode 100644
index e5c895e73b..0000000000
--- a/tests/unit/v1beta1/testdata/update-prefix-1.textproto
+++ /dev/null
@@ -1,11 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# In the input data, one field cannot be a prefix of another.
-
-description: "update: prefix #1"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a.b\": 1, \"a\": 2}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-prefix-2.textproto b/tests/unit/v1beta1/testdata/update-prefix-2.textproto
deleted file mode 100644
index 4870176186..0000000000
--- a/tests/unit/v1beta1/testdata/update-prefix-2.textproto
+++ /dev/null
@@ -1,11 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# In the input data, one field cannot be a prefix of another.
-
-description: "update: prefix #2"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"a.b\": 2}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-prefix-3.textproto b/tests/unit/v1beta1/testdata/update-prefix-3.textproto
deleted file mode 100644
index 0c03b0d6b8..0000000000
--- a/tests/unit/v1beta1/testdata/update-prefix-3.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# In the input data, one field cannot be a prefix of another, even if the values
-# could in principle be combined.
-
-description: "update: prefix #3"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": {\"b\": 1}, \"a.d\": 2}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-quoting.textproto b/tests/unit/v1beta1/testdata/update-quoting.textproto
deleted file mode 100644
index 20e530a760..0000000000
--- a/tests/unit/v1beta1/testdata/update-quoting.textproto
+++ /dev/null
@@ -1,45 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# In a field path, any component beginning with a non-letter or underscore is
-# quoted.
-
-description: "update: non-letter starting chars are quoted, except underscore"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"_0.1.+2\": 1}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "_0"
- value: <
- map_value: <
- fields: <
- key: "1"
- value: <
- map_value: <
- fields: <
- key: "+2"
- value: <
- integer_value: 1
- >
- >
- >
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "_0.`1`.`+2`"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-split-top-level.textproto b/tests/unit/v1beta1/testdata/update-split-top-level.textproto
deleted file mode 100644
index d1b0ca0da1..0000000000
--- a/tests/unit/v1beta1/testdata/update-split-top-level.textproto
+++ /dev/null
@@ -1,45 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Update method splits only top-level keys at dots. Keys at other levels are
-# taken literally.
-
-description: "update: Split on dots for top-level keys only"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"h.g\": {\"j.k\": 6}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "h"
- value: <
- map_value: <
- fields: <
- key: "g"
- value: <
- map_value: <
- fields: <
- key: "j.k"
- value: <
- integer_value: 6
- >
- >
- >
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "h.g"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-split.textproto b/tests/unit/v1beta1/testdata/update-split.textproto
deleted file mode 100644
index b96fd6a4f7..0000000000
--- a/tests/unit/v1beta1/testdata/update-split.textproto
+++ /dev/null
@@ -1,44 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Update method splits top-level keys at dots.
-
-description: "update: split on dots"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a.b.c\": 1}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- map_value: <
- fields: <
- key: "b"
- value: <
- map_value: <
- fields: <
- key: "c"
- value: <
- integer_value: 1
- >
- >
- >
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "a.b.c"
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-st-alone.textproto b/tests/unit/v1beta1/testdata/update-st-alone.textproto
deleted file mode 100644
index 0d5ab6e9fb..0000000000
--- a/tests/unit/v1beta1/testdata/update-st-alone.textproto
+++ /dev/null
@@ -1,26 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# If the only values in the input are ServerTimestamps, then no update operation
-# should be produced.
-
-description: "update: ServerTimestamp alone"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a"
- set_to_server_value: REQUEST_TIME
- >
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-st-dot.textproto b/tests/unit/v1beta1/testdata/update-st-dot.textproto
deleted file mode 100644
index 19d4d18432..0000000000
--- a/tests/unit/v1beta1/testdata/update-st-dot.textproto
+++ /dev/null
@@ -1,27 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# Like other uses of ServerTimestamp, the data is pruned and the field does not
-# appear in the update mask, because it is in the transform. In this case An
-# update operation is produced just to hold the precondition.
-
-description: "update: ServerTimestamp with dotted field"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a.b.c\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a.b.c"
- set_to_server_value: REQUEST_TIME
- >
- >
- current_document: <
- exists: true
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-st-multi.textproto b/tests/unit/v1beta1/testdata/update-st-multi.textproto
deleted file mode 100644
index 0434cb59ab..0000000000
--- a/tests/unit/v1beta1/testdata/update-st-multi.textproto
+++ /dev/null
@@ -1,49 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A document can have more than one ServerTimestamp field. Since all the
-# ServerTimestamp fields are removed, the only field in the update is "a".
-
-# b is not in the mask because it will be set in the transform. c must be in the
-# mask: it should be replaced entirely. The transform will set c.d to the
-# timestamp, but the update will delete the rest of c.
-
-description: "update: multiple ServerTimestamp fields"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "c"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- field_transforms: <
- field_path: "c.d"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-st-nested.textproto b/tests/unit/v1beta1/testdata/update-st-nested.textproto
deleted file mode 100644
index f79d9c6a07..0000000000
--- a/tests/unit/v1beta1/testdata/update-st-nested.textproto
+++ /dev/null
@@ -1,42 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A ServerTimestamp value can occur at any depth. In this case, the transform
-# applies to the field path "b.c". Since "c" is removed from the update, "b"
-# becomes empty, so it is also removed from the update.
-
-description: "update: nested ServerTimestamp field"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- field_paths: "b"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b.c"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-st-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-st-noarray-nested.textproto
deleted file mode 100644
index 2939dd6464..0000000000
--- a/tests/unit/v1beta1/testdata/update-st-noarray-nested.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# There cannot be an array value anywhere on the path from the document root to
-# the ServerTimestamp sentinel. Firestore transforms don't support array indexing.
-
-description: "update: ServerTimestamp cannot be anywhere inside an array value"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-st-noarray.textproto b/tests/unit/v1beta1/testdata/update-st-noarray.textproto
deleted file mode 100644
index f3879cdf22..0000000000
--- a/tests/unit/v1beta1/testdata/update-st-noarray.textproto
+++ /dev/null
@@ -1,12 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The ServerTimestamp sentinel must be the value of a field. Firestore transforms
-# don't support array indexing.
-
-description: "update: ServerTimestamp cannot be in an array value"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}"
- is_error: true
->
diff --git a/tests/unit/v1beta1/testdata/update-st-with-empty-map.textproto b/tests/unit/v1beta1/testdata/update-st-with-empty-map.textproto
deleted file mode 100644
index 1901de2a15..0000000000
--- a/tests/unit/v1beta1/testdata/update-st-with-empty-map.textproto
+++ /dev/null
@@ -1,48 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp
-# should be stripped out but the empty map should remain.
-
-description: "update: ServerTimestamp beside an empty map"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": {\"b\": {}, \"c\": \"ServerTimestamp\"}}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- map_value: <
- fields: <
- key: "b"
- value: <
- map_value: <
- >
- >
- >
- >
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "a.c"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-st.textproto b/tests/unit/v1beta1/testdata/update-st.textproto
deleted file mode 100644
index 12045a9220..0000000000
--- a/tests/unit/v1beta1/testdata/update-st.textproto
+++ /dev/null
@@ -1,42 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# A key with the special ServerTimestamp sentinel is removed from the data in the
-# update operation. Instead it appears in a separate Transform operation. Note
-# that in these tests, the string "ServerTimestamp" should be replaced with the
-# special ServerTimestamp value.
-
-description: "update: ServerTimestamp with data"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- exists: true
- >
- >
- writes: <
- transform: <
- document: "projects/projectID/databases/(default)/documents/C/d"
- field_transforms: <
- field_path: "b"
- set_to_server_value: REQUEST_TIME
- >
- >
- >
- >
->
diff --git a/tests/unit/v1beta1/testdata/update-uptime.textproto b/tests/unit/v1beta1/testdata/update-uptime.textproto
deleted file mode 100644
index 66119ac61c..0000000000
--- a/tests/unit/v1beta1/testdata/update-uptime.textproto
+++ /dev/null
@@ -1,37 +0,0 @@
-# DO NOT MODIFY. This file was generated by
-# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
-
-# The Update call supports a last-update-time precondition.
-
-description: "update: last-update-time precondition"
-update: <
- doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
- precondition: <
- update_time: <
- seconds: 42
- >
- >
- json_data: "{\"a\": 1}"
- request: <
- database: "projects/projectID/databases/(default)"
- writes: <
- update: <
- name: "projects/projectID/databases/(default)/documents/C/d"
- fields: <
- key: "a"
- value: <
- integer_value: 1
- >
- >
- >
- update_mask: <
- field_paths: "a"
- >
- current_document: <
- update_time: <
- seconds: 42
- >
- >
- >
- >
->