From 728b07c9177532bbbbfd1890f23e98950aea3f02 Mon Sep 17 00:00:00 2001 From: Steffany Brown <30247553+steffnay@users.noreply.github.com> Date: Fri, 17 Jun 2022 11:20:43 -0700 Subject: [PATCH 1/7] feat: add destination_expiration_time property to copy job (#1277) * feat: add destination_expiration_time property to copy job * update test * refactor test * remove unused import * Update google/cloud/bigquery/job/copy_.py Co-authored-by: Anthonios Partheniou * Update google/cloud/bigquery/job/copy_.py Co-authored-by: Anthonios Partheniou Co-authored-by: Anthonios Partheniou --- google/cloud/bigquery/job/copy_.py | 14 ++++++++++++++ tests/system/test_client.py | 5 +++++ tests/unit/job/test_copy.py | 10 ++++++++++ 3 files changed, 29 insertions(+) diff --git a/google/cloud/bigquery/job/copy_.py b/google/cloud/bigquery/job/copy_.py index eb7f609a5..9d7548ec5 100644 --- a/google/cloud/bigquery/job/copy_.py +++ b/google/cloud/bigquery/job/copy_.py @@ -126,6 +126,20 @@ def operation_type(self, value: Optional[str]): value = OperationType.OPERATION_TYPE_UNSPECIFIED self._set_sub_prop("operationType", value) + @property + def destination_expiration_time(self) -> str: + """google.cloud.bigquery.job.DestinationExpirationTime: The time when the + destination table expires. Expired tables will be deleted and their storage reclaimed. + + See + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy.FIELDS.destination_expiration_time + """ + return self._get_sub_prop("destinationExpirationTime") + + @destination_expiration_time.setter + def destination_expiration_time(self, value: str): + self._set_sub_prop("destinationExpirationTime", value) + class CopyJob(_AsyncJob): """Asynchronous job: copy data into a table from other tables. 
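As a usage sketch of the new setting (the table paths and the expiration timestamp below are placeholders, not values taken from this patch), the property slots into the same snapshot flow exercised by the system test:

```python
from google.cloud import bigquery

client = bigquery.Client()

# Snapshot the source table and let the snapshot expire automatically.
copy_config = bigquery.CopyJobConfig()
copy_config.operation_type = bigquery.OperationType.SNAPSHOT
# RFC 3339 timestamp written to JobConfigurationTableCopy.destinationExpirationTime.
copy_config.destination_expiration_time = "2030-01-01T00:00:00Z"

copy_job = client.copy_table(
    sources="your-project.your_dataset.source_table",
    destination="your-project.your_dataset.snapshot_table",
    job_config=copy_config,
)
copy_job.result()  # Wait for the snapshot job to finish.
```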
diff --git a/tests/system/test_client.py b/tests/system/test_client.py index 49eb70a8b..c99ee1c72 100644 --- a/tests/system/test_client.py +++ b/tests/system/test_client.py @@ -2153,6 +2153,11 @@ def test_table_snapshots(dataset_id): copy_config = CopyJobConfig() copy_config.operation_type = OperationType.SNAPSHOT + today = datetime.date.today() + destination_expiration_time = f"{today.year + 1}-01-01T00:00:00Z" + + copy_config.destination_expiration_time = destination_expiration_time + copy_job = client.copy_table( sources=source_table_path, destination=snapshot_table_path, diff --git a/tests/unit/job/test_copy.py b/tests/unit/job/test_copy.py index d94e5bc88..a3b5c70e3 100644 --- a/tests/unit/job/test_copy.py +++ b/tests/unit/job/test_copy.py @@ -19,6 +19,8 @@ from .helpers import _Base from .helpers import _make_client +import datetime + class TestCopyJobConfig(_Base): JOB_TYPE = "copy" @@ -36,6 +38,7 @@ def test_ctor_defaults(self): assert config.create_disposition is None assert config.write_disposition is None + assert config.destination_expiration_time is None assert config.destination_encryption_configuration is None assert config.operation_type == OperationType.OPERATION_TYPE_UNSPECIFIED @@ -48,15 +51,22 @@ def test_ctor_w_properties(self): write_disposition = WriteDisposition.WRITE_TRUNCATE snapshot_operation = OperationType.SNAPSHOT + today = datetime.date.today() + destination_expiration_time = f"{today.year + 1}-01-01T00:00:00Z" + config = self._get_target_class()( create_disposition=create_disposition, write_disposition=write_disposition, operation_type=snapshot_operation, + destination_expiration_time=destination_expiration_time, ) self.assertEqual(config.create_disposition, create_disposition) self.assertEqual(config.write_disposition, write_disposition) self.assertEqual(config.operation_type, snapshot_operation) + self.assertEqual( + config.destination_expiration_time, destination_expiration_time + ) def test_to_api_repr_with_encryption(self): from google.cloud.bigquery.encryption_configuration import ( From e760d1bcb76561b4247adde2fd06ae0b686befb9 Mon Sep 17 00:00:00 2001 From: Steffany Brown <30247553+steffnay@users.noreply.github.com> Date: Fri, 17 Jun 2022 14:11:32 -0700 Subject: [PATCH 2/7] docs(samples): add table snapshot sample (#1274) * docs(samples): add table snapshot sample * docs(samples): fix region tag --- samples/snippets/conftest.py | 15 ++++++- samples/snippets/create_table_snapshot.py | 43 +++++++++++++++++++ .../snippets/create_table_snapshot_test.py | 33 ++++++++++++++ 3 files changed, 90 insertions(+), 1 deletion(-) create mode 100644 samples/snippets/create_table_snapshot.py create mode 100644 samples/snippets/create_table_snapshot_test.py diff --git a/samples/snippets/conftest.py b/samples/snippets/conftest.py index 37b52256b..f53509d44 100644 --- a/samples/snippets/conftest.py +++ b/samples/snippets/conftest.py @@ -18,7 +18,6 @@ import pytest import test_utils.prefixer - prefixer = test_utils.prefixer.Prefixer("python-bigquery", "samples/snippets") @@ -52,6 +51,20 @@ def dataset_id(bigquery_client: bigquery.Client, project_id: str) -> Iterator[st bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True) +@pytest.fixture +def table_id( + bigquery_client: bigquery.Client, project_id: str, dataset_id: str +) -> Iterator[str]: + table_id = prefixer.create_prefix() + full_table_id = f"{project_id}.{dataset_id}.{table_id}" + table = bigquery.Table( + full_table_id, schema=[bigquery.SchemaField("string_col", "STRING")] + ) + 
bigquery_client.create_table(table) + yield full_table_id + bigquery_client.delete_table(table, not_found_ok=True) + + @pytest.fixture(scope="session") def entity_id(bigquery_client: bigquery.Client, dataset_id: str) -> str: return "cloud-developer-relations@google.com" diff --git a/samples/snippets/create_table_snapshot.py b/samples/snippets/create_table_snapshot.py new file mode 100644 index 000000000..846495e5c --- /dev/null +++ b/samples/snippets/create_table_snapshot.py @@ -0,0 +1,43 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_table_snapshot(source_table_id: str, snapshot_table_id: str) -> None: + original_source_table_id = source_table_id + original_snapshot_table_id = snapshot_table_id + # [START bigquery_create_table_snapshot] + from google.cloud import bigquery + + # TODO(developer): Set table_id to the ID of the table to create. + source_table_id = "your-project.your_dataset.your_table_name" + snapshot_table_id = "your-project.your_dataset.snapshot_table_name" + # [END bigquery_create_table_snapshot] + source_table_id = original_source_table_id + snapshot_table_id = original_snapshot_table_id + # [START bigquery_create_table_snapshot] + + # Construct a BigQuery client object. + client = bigquery.Client() + copy_config = bigquery.CopyJobConfig() + copy_config.operation_type = bigquery.OperationType.SNAPSHOT + + copy_job = client.copy_table( + sources=source_table_id, + destination=snapshot_table_id, + job_config=copy_config, + ) + copy_job.result() + + print("Created table snapshot {}".format(snapshot_table_id)) + # [END bigquery_create_table_snapshot] diff --git a/samples/snippets/create_table_snapshot_test.py b/samples/snippets/create_table_snapshot_test.py new file mode 100644 index 000000000..f1d8d0f7b --- /dev/null +++ b/samples/snippets/create_table_snapshot_test.py @@ -0,0 +1,33 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import typing
+
+import create_table_snapshot
+
+if typing.TYPE_CHECKING:
+    import pytest
+
+
+def test_create_table_snapshot(
+    capsys: "pytest.CaptureFixture[str]",
+    table_id: str,
+    random_table_id: str,
+) -> None:
+
+    create_table_snapshot.create_table_snapshot(table_id, random_table_id)
+
+    out, _ = capsys.readouterr()
+
+    assert "Created table snapshot {}".format(random_table_id) in out

From 2cc4c0b14d016013cbc29e43dc109ced47371ee2 Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Mon, 27 Jun 2022 14:39:26 -0500
Subject: [PATCH 3/7] doc: share design document for query retry logic (#1123)

* doc: share design document for query retry logic
* add design document to contents tree
* clarify a few points
* Update docs/design/query-retries.md

Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com>

Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com>
---
 docs/design/index.rst | 11 ++++
 docs/design/query-retries.md | 115 +++++++++++++++++++++++++++++++++++
 docs/index.rst | 1 +
 3 files changed, 127 insertions(+)
 create mode 100644 docs/design/index.rst
 create mode 100644 docs/design/query-retries.md

diff --git a/docs/design/index.rst b/docs/design/index.rst
new file mode 100644
index 000000000..5750c7a98
--- /dev/null
+++ b/docs/design/index.rst
@@ -0,0 +1,11 @@
+Client Library Design
+=====================
+
+Some features of this client library have complex requirements and/or
+implementation. These documents describe the design decisions that contributed
+to those features.
+
+.. toctree::
+   :maxdepth: 2
+
+   query-retries
diff --git a/docs/design/query-retries.md b/docs/design/query-retries.md
new file mode 100644
index 000000000..1bac82f5c
--- /dev/null
+++ b/docs/design/query-retries.md
@@ -0,0 +1,115 @@
+# Design of query retries in the BigQuery client libraries for Python
+
+
+## Overview
+
+The BigQuery client libraries for Python must safely retry API requests related to initiating a query. By "safely", it is meant that the BigQuery backend never successfully executes the query twice. This avoids duplicated rows from INSERT DML queries, among other problems.
+
+To achieve this goal, the client library only retries an API request relating to queries if at least one of the following is true: (1) issuing this exact request is idempotent, meaning that it won't result in a duplicate query being issued, or (2) the query has already failed in such a way that it is safe to re-issue the query.
+
+
+## Background
+
+
+### API-level retries
+
+Retries for nearly all API requests were [added in 2017](https://github.com/googleapis/google-cloud-python/pull/4148) and are [configurable via a Retry object](https://googleapis.dev/python/google-api-core/latest/retry.html#google.api_core.retry.Retry) passed to the retry argument. Notably, this includes the "query" method on the Python client, corresponding to the [jobs.insert REST API method](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert). The Python client always populates the [jobReference.jobId](https://cloud.google.com/bigquery/docs/reference/rest/v2/JobReference#FIELDS.job_id) field of the request body. If the BigQuery REST API receives a jobs.insert request for a job with the same ID, the REST API fails because the job already exists.
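As a hedged illustration of that API-level retry hook (the predicate and deadline below are examples, not the library's defaults), a caller can pass a custom `Retry` object to the `query` method:

```python
from google.api_core import exceptions
from google.api_core.retry import Retry
from google.cloud import bigquery

client = bigquery.Client()


def is_transient(exc):
    # Example predicate: retry only transport-level errors that are usually transient.
    return isinstance(
        exc,
        (
            exceptions.TooManyRequests,
            exceptions.InternalServerError,
            exceptions.ServiceUnavailable,
        ),
    )


# The Retry object governs re-sending the jobs.insert call itself; the client
# reuses the same job ID on every attempt, so a duplicate insert is rejected.
query_job = client.query("SELECT 1", retry=Retry(predicate=is_transient, deadline=120.0))
print(list(query_job.result()))
```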
+ + +### jobs.insert and jobs.query API requests + +By default, the Python client starts a query using the [jobs.insert REST API +method](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert). +Support for the [jobs.query REST API +method](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query) +was [added via the `api_method` +parameter](https://github.com/googleapis/python-bigquery/pull/967) and is +included in version 3.0 of the Python client library. + +The jobs.query REST API method differs from jobs.insert in that it does not accept a job ID. Instead, the [requestId parameter](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#QueryRequest.FIELDS.request_id) provides a window of idempotency for duplicate requests. + + +### Re-issuing a query + +The ability to re-issue a query automatically was a [long](https://github.com/googleapis/google-cloud-python/issues/5555) [requested](https://github.com/googleapis/python-bigquery/issues/14) [feature](https://github.com/googleapis/python-bigquery/issues/539). As work ramped up on the SQLAlchemy connector, it became clear that this feature was necessary to keep the test suite, which issues hundreds of queries, from being [too flakey](https://github.com/googleapis/python-bigquery-sqlalchemy/issues?q=is%3Aissue+is%3Aclosed+author%3Aapp%2Fflaky-bot+sort%3Acreated-asc). + +Retrying a query is not as simple as retrying a single API request. In many +cases the client library does not "know" about a query job failure until it +tries to fetch the query results. To solve this, the [client re-issues a +query](https://github.com/googleapis/python-bigquery/pull/837) as it was +originally issued only if the query job has failed for a retryable reason. + + +### getQueryResults error behavior + +The client library uses [the jobs.getQueryResults REST API method](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/getQueryResults) to wait for a query to finish. This REST API has a unique behavior in that it translates query job failures into HTTP error status codes. To disambiguate these error responses from one that may have occurred further up the REST API stack (such as from the Google load balancer), the client library inspects the error response body. + +When the error corresponds to a query job failure, BigQuery populates the +"errors" array field, with the first element in the list corresponding to the +error which directly caused the job failure. There are many [error response +messages](https://cloud.google.com/bigquery/docs/error-messages), but only some +of them indicate that re-issuing the query job may help. For example, if the +job fails due to invalid query syntax, re-issuing the query won't help. If a +query job fails due to "backendError" or "rateLimitExceeded", we know that the +job did not successfully execute for some other reason. 
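To make the distinction concrete, a check along these lines can decide whether a failed job is worth re-issuing. This is an illustrative sketch rather than the library's actual predicate in `google.cloud.bigquery.retry`, and it assumes the parsed error entries are dicts carrying a `reason` key, as BigQuery returns them:

```python
from google.api_core import exceptions

# Error reasons for which re-issuing the query job may succeed.
RETRYABLE_JOB_REASONS = frozenset({"backendError", "rateLimitExceeded"})


def job_failure_is_retryable(exc: Exception) -> bool:
    """Return True if the exception looks like a query job failure worth re-issuing."""
    if not isinstance(exc, exceptions.GoogleAPICallError):
        return False
    errors = exc.errors or []
    if not errors or not isinstance(errors[0], dict):
        return False
    # The first entry corresponds to the error that directly caused the job failure.
    return errors[0].get("reason") in RETRYABLE_JOB_REASONS
```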
+
+
+## Detailed design
+
+As mentioned in the "Overview" section, the Python client only retries a query request if at least one of the following is true: (1) issuing this exact request is idempotent, meaning that it won't result in a duplicate query being issued, or (2) the query has already failed in such a way that it is safe to re-issue the query.
+
+A developer can configure when to retry an API request (corresponding to #1 "issuing this exact request is idempotent") via the query method's `retry` parameter. A developer can configure when to re-issue a query job after a job failure (corresponding to #2 "the query has already failed") via the query method's `job_retry` parameter.
+
+
+### Retrying API requests via the `retry` parameter
+
+The first set of retries are at the API layer. The client library sends an
+identical request if the request is idempotent.
+
+#### Retrying the jobs.insert API via the retry parameter
+
+When the `api_method` parameter is set to `"INSERT"`, which is the default
+value, the client library uses the jobs.insert REST API to start a query job.
+Before it issues this request, it sets a job ID. This job ID remains constant
+across API retries.
+
+If the job ID was randomly generated, and the jobs.insert request and all retries fail, the client library sends a request to the jobs.get API. This covers the case when a query request succeeded, but there was a transient issue that prevented the client from receiving a successful response.
+
+
+#### Retrying the jobs.query API via the retry parameter
+
+When the `api_method` parameter is set to `"QUERY"` (available in version 3 of
+the client library), the client library sends a request to the jobs.query REST
+API. The client library automatically populates the `requestId` parameter in
+the request body. The `requestId` remains constant across API retries, ensuring
+that requests are idempotent.
+
+As there is no job ID available, the client library cannot call jobs.get if the query happened to succeed, but all retries resulted in an error response. In this case, the client library throws an exception.
+
+
+#### Retrying the jobs.getQueryResults API via the retry parameter
+
+The jobs.getQueryResults REST API is read-only. Thus, it is always safe to
+retry. As noted in the "Background" section, HTTP error response codes can
+indicate that the job itself has failed, so this may retry more often than is
+strictly needed
+([Issue #1122](https://github.com/googleapis/python-bigquery/issues/1122)
+has been opened to investigate this).
+
+
+### Re-issuing queries via the `job_retry` parameter
+
+The second set of retries are at the "job" layer, called "re-issue" in this
+document. The client library sends an identical query request (except for the
+job or request identifier) if the query job has failed for a re-issuable reason.
+
+
+#### Deciding when it is safe to re-issue a query
+
+The conditions when it is safe to re-issue a query are different from the conditions when it is safe to retry an individual API request. As such, the `job_retry` parameter is provided to configure this behavior.
+
+The `job_retry` parameter is only used if (1) a query job fails and (2) a job ID is not provided by the developer. This is because it must generate a new job ID (or request ID, depending on the method used to create the query job) to avoid getting the same failed job.
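A brief usage sketch (the query text is only an example, and `DEFAULT_JOB_RETRY` is the default job-retry object shipped in `google.cloud.bigquery.retry`; a custom `Retry` with its own predicate can be substituted):

```python
from google.cloud import bigquery
from google.cloud.bigquery.retry import DEFAULT_JOB_RETRY

client = bigquery.Client()

# Leave job_id unset so the client is free to generate a fresh job ID (or, with
# api_method="QUERY", a fresh request ID) when it re-issues a failed query.
query_job = client.query(
    "SELECT COUNT(*) FROM `bigquery-public-data.usa_names.usa_1910_2013`",
    job_retry=DEFAULT_JOB_RETRY,  # or a custom google.api_core.retry.Retry
)
rows = query_job.result()
```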
+
+The `job_retry` parameter logic only happens after the client makes a request to the `jobs.getQueryResults` REST API, which fails. The client examines the exception to determine if this failure was caused by a failed job and that the failure reason (e.g. "backendError" or "rateLimitExceeded") indicates that re-issuing the query may help.
+
+If it is determined that the query job can be re-issued safely, the original logic to issue the query is executed. If the jobs.insert REST API was originally used, a new job ID is generated. Otherwise, if the jobs.query REST API was originally used, a new request ID is generated. All other parts of the request body remain identical to the original request body for the failed query job, and the process repeats until `job_retry` is exhausted.
diff --git a/docs/index.rst b/docs/index.rst
index 4ab0a298d..500c67a7f 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -26,6 +26,7 @@ API Reference
     reference
     dbapi
+    design/index
 
 Migration Guide
 ---------------

From 52d9f14fb1d183f64a62fee1fddc0bf576a0a3e9 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Sat, 9 Jul 2022 13:46:56 -0400
Subject: [PATCH 4/7] fix: require python 3.7+ (#1284)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore(python): drop python 3.6
Source-Link: https://github.com/googleapis/synthtool/commit/4f89b13af10d086458f9b379e56a614f9d6dab7b
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c
* add api_description to .repo-metadata.json
* require python 3.7+ in setup.py
* remove python 3.6 sample configs
* 🦉 Updates from OwlBot post-processor
See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
* exclude templated README.rst
* update python_requires
* remove python 3.6 from noxfile.py

Co-authored-by: Owl Bot
Co-authored-by: Anthonios Partheniou
---
 .github/.OwlBot.lock.yaml | 4 +-
 .kokoro/continuous/prerelease-deps.cfg | 7 ++++
 .kokoro/presubmit/prerelease-deps.cfg | 7 ++++
 .kokoro/samples/python3.6/common.cfg | 40 -------------------
 .kokoro/samples/python3.6/continuous.cfg | 7 ----
 .kokoro/samples/python3.6/periodic-head.cfg | 11 -----
 .kokoro/samples/python3.6/periodic.cfg | 6 ---
 .kokoro/samples/python3.6/presubmit.cfg | 6 ---
 .kokoro/test-samples-impl.sh | 4 +-
 .repo-metadata.json | 3 +-
 CONTRIBUTING.rst | 6 +--
 README.rst | 4 +-
 noxfile.py | 2 +-
 owlbot.py | 1 +
 samples/geography/noxfile.py | 2 +-
 samples/magics/noxfile.py | 2 +-
 samples/snippets/README.rst | 2 +-
 samples/snippets/noxfile.py | 2 +-
 .../templates/install_deps.tmpl.rst | 2 +-
 setup.py | 3 +-
 20 files changed, 32 insertions(+), 89 deletions(-)
 create mode 100644 .kokoro/continuous/prerelease-deps.cfg
 create mode 100644 .kokoro/presubmit/prerelease-deps.cfg
 delete mode 100644 .kokoro/samples/python3.6/common.cfg
 delete mode 100644 .kokoro/samples/python3.6/continuous.cfg
 delete mode 100644 .kokoro/samples/python3.6/periodic-head.cfg
 delete mode 100644 .kokoro/samples/python3.6/periodic.cfg
 delete mode 100644 .kokoro/samples/python3.6/presubmit.cfg

diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 757c9dca7..1ce608523 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,5 +13,5 @@
 # limitations under the License.
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 -# created: 2022-05-05T22:08:23.383410683Z + digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c +# created: 2022-07-05T18:31:20.838186805Z diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg new file mode 100644 index 000000000..3595fb43f --- /dev/null +++ b/.kokoro/continuous/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg new file mode 100644 index 000000000..3595fb43f --- /dev/null +++ b/.kokoro/presubmit/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg deleted file mode 100644 index 20f6b9691..000000000 --- a/.kokoro/samples/python3.6/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.6" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py36" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg deleted file mode 100644 index 7218af149..000000000 --- a/.kokoro/samples/python3.6/continuous.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg deleted file mode 100644 index 5aa01bab5..000000000 --- a/.kokoro/samples/python3.6/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" -} diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg deleted file mode 100644 index 71cd1e597..000000000 --- a/.kokoro/samples/python3.6/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg deleted file mode 100644 index a1c8d9759..000000000 --- a/.kokoro/samples/python3.6/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 8a324c9c7..2c6500cae 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -33,7 +33,7 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.6 -m pip install --upgrade --quiet nox +python3.9 -m pip install --upgrade --quiet nox # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then @@ -76,7 +76,7 @@ for file in samples/**/requirements.txt; do echo "------------------------------------------------------------" # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" + python3.9 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. diff --git a/.repo-metadata.json b/.repo-metadata.json index 670aba793..d1be7ec4d 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -13,5 +13,6 @@ "requires_billing": false, "default_version": "v2", "codeowner_team": "@googleapis/api-bigquery", - "api_shortname": "bigquery" + "api_shortname": "bigquery", + "api_description": "is a fully managed, NoOps, low cost data analytics service.\nData can be streamed into BigQuery at millions of rows per second to enable real-time analysis.\nWith BigQuery you can easily deploy Petabyte-scale Databases." } diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f183b63b4..d06598b31 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. 
- The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -221,13 +221,11 @@ Supported Python Versions We support: -- `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ -.. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ @@ -239,7 +237,7 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-bigquery/blob/main/noxfile.py -We also explicitly decided to support Python 3 beginning with version 3.6. +We also explicitly decided to support Python 3 beginning with version 3.7. Reasons for this include: - Encouraging use of newest versions of Python 3 diff --git a/README.rst b/README.rst index e8578916a..475d055a2 100644 --- a/README.rst +++ b/README.rst @@ -52,11 +52,11 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.6, < 3.11 +Python >= 3.7, < 3.11 Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7, Python == 3.5. +Python == 2.7, Python == 3.5, Python == 3.6. The last version of this library compatible with Python 2.7 and 3.5 is `google-cloud-bigquery==1.28.0`. diff --git a/noxfile.py b/noxfile.py index f088e10c2..c6f7c76b1 100644 --- a/noxfile.py +++ b/noxfile.py @@ -29,7 +29,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.10"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() # 'docfx' is excluded since it only needs to run in 'docs-presubmit' diff --git a/owlbot.py b/owlbot.py index ca96f4e08..4d287ac46 100644 --- a/owlbot.py +++ b/owlbot.py @@ -68,6 +68,7 @@ # https://github.com/googleapis/python-bigquery/issues/191 ".kokoro/presubmit/presubmit.cfg", ".github/workflows", # exclude gh actions as credentials are needed for tests + "README.rst", ], ) diff --git a/samples/geography/noxfile.py b/samples/geography/noxfile.py index a40410b56..29b5bc852 100644 --- a/samples/geography/noxfile.py +++ b/samples/geography/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/samples/magics/noxfile.py b/samples/magics/noxfile.py index a40410b56..29b5bc852 100644 --- a/samples/magics/noxfile.py +++ b/samples/magics/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/samples/snippets/README.rst b/samples/snippets/README.rst index 05af1e812..b5865a6ce 100644 --- a/samples/snippets/README.rst +++ b/samples/snippets/README.rst @@ -47,7 +47,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.6+. +#. Create a virtualenv. Samples are compatible with Python 3.7+. .. code-block:: bash diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index a40410b56..29b5bc852 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst index 275d64989..6f069c6c8 100644 --- a/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.6+. +#. Create a virtualenv. Samples are compatible with Python 3.7+. .. code-block:: bash diff --git a/setup.py b/setup.py index a040e96e7..a3d5c829e 100644 --- a/setup.py +++ b/setup.py @@ -111,7 +111,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -124,7 +123,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=3.6, <3.11", + python_requires=">=3.7, <3.11", include_package_data=True, zip_safe=False, ) From 9b7e3e424cbd08af8b08c91e6397a3f1b7811064 Mon Sep 17 00:00:00 2001 From: "Leah E. 
Cole" <6719667+leahecole@users.noreply.github.com> Date: Tue, 19 Jul 2022 12:49:22 -0400 Subject: [PATCH 5/7] docs(samples): explicitly add bq to samples reqs, upgrade grpc to fix bug on m1 (#1290) * fix: explicitly add bq to samples reqs, upgrade grpc to fix bug on m1 * update grpc in setup.py * fix: rm 3.6 constraints, add grpcio to 3.7-3.10 constraints --- samples/snippets/requirements.txt | 3 ++- setup.py | 2 +- testing/constraints-3.10.txt | 1 + testing/constraints-3.6.txt | 27 --------------------------- testing/constraints-3.7.txt | 2 +- testing/constraints-3.8.txt | 1 + testing/constraints-3.9.txt | 1 + 7 files changed, 7 insertions(+), 30 deletions(-) delete mode 100644 testing/constraints-3.6.txt diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index ad64565e2..a8322de0a 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,7 +1,8 @@ db-dtypes==1.0.1 +google-cloud-bigquery==3.2.0 google-cloud-bigquery-storage==2.13.1 google-auth-oauthlib==0.5.1 -grpcio==1.46.3 +grpcio==1.47.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' ipython==8.3.0; python_version >= '3.9' diff --git a/setup.py b/setup.py index a3d5c829e..f811a47b1 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-bigquery/issues/695 + "grpcio >= 1.47.0, < 2.0dev", # https://github.com/googleapis/python-bigquery/issues/1262 # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index e69de29bb..c5e37fc9b 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -0,0 +1 @@ +grpcio==1.47.0 diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt deleted file mode 100644 index 47b842a6d..000000000 --- a/testing/constraints-3.6.txt +++ /dev/null @@ -1,27 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -db-dtypes==0.3.0 -geopandas==0.9.0 -google-api-core==1.31.5 -google-cloud-bigquery-storage==2.0.0 -google-cloud-core==1.4.1 -google-resumable-media==0.6.0 -grpcio==1.38.1 -ipython==7.0.1 -opentelemetry-api==1.1.0 -opentelemetry-instrumentation==0.20b0 -opentelemetry-sdk==1.1.0 -pandas==1.0.0 -proto-plus==1.15.0 -protobuf==3.12.0 -pyarrow==3.0.0 -python-dateutil==2.7.2 -requests==2.18.0 -Shapely==1.6.0 -six==1.13.0 -tqdm==4.7.4 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index e3c7a332c..c5803387e 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -11,7 +11,7 @@ google-api-core==1.31.5 google-cloud-bigquery-storage==2.0.0 google-cloud-core==1.4.1 google-resumable-media==0.6.0 -grpcio==1.38.1 +grpcio==1.47.0 ipython==7.0.1 opentelemetry-api==1.1.0 opentelemetry-instrumentation==0.20b0 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index 3fd8886e6..e5e73c5c7 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -1 +1,2 @@ +grpcio==1.47.0 pandas==1.2.0 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index 39dc6250e..d4c302867 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -4,4 +4,5 @@ # # NOTE: Not comprehensive yet, will eventually be maintained semi-automatically by # the renovate bot. +grpcio==1.47.0 pyarrow>=4.0.0 From af0101d7d7c6a58941957f2f7652363a947c9f79 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Mon, 25 Jul 2022 13:57:12 -0400 Subject: [PATCH 6/7] chore: updates minor grammatical error (#1299) * Updates minor grammatical error * chore: update grammar/spelling --- google/cloud/bigquery/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/bigquery/client.py b/google/cloud/bigquery/client.py index fb772ea11..1200d78f9 100644 --- a/google/cloud/bigquery/client.py +++ b/google/cloud/bigquery/client.py @@ -3230,7 +3230,7 @@ def query( will be ignored if a ``job_id`` is also given. location (Optional[str]): Location where to run the job. Must match the location of the - any table used in the query as well as the destination table. + table used in the query as well as the destination table. project (Optional[str]): Project ID of the project of where to run the job. Defaults to the client's project. 
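For reference, a small illustration of the corrected wording (the dataset location and table path are placeholders):

```python
from google.cloud import bigquery

client = bigquery.Client()

# The job location must match the location of every table referenced in the
# query (and of the destination table, if one is configured).
query_job = client.query(
    "SELECT name FROM `your-project.your_eu_dataset.your_table`",
    location="EU",
    project="your-project",  # defaults to the client's project when omitted
)
rows = query_job.result()
```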
From e782aa83ea7ba6e15ea7de81be19ea5374cce4d2 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 26 Jul 2022 13:06:19 -0700 Subject: [PATCH 7/7] chore(main): release 3.3.0 (#1279) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 18 ++++++++++++++++++ google/cloud/bigquery/version.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1f999aa10..c7214ea16 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,24 @@ [1]: https://pypi.org/project/google-cloud-bigquery/#history +## [3.3.0](https://github.com/googleapis/python-bigquery/compare/v3.2.0...v3.3.0) (2022-07-25) + + +### Features + +* add destination_expiration_time property to copy job ([#1277](https://github.com/googleapis/python-bigquery/issues/1277)) ([728b07c](https://github.com/googleapis/python-bigquery/commit/728b07c9177532bbbbfd1890f23e98950aea3f02)) + + +### Bug Fixes + +* require python 3.7+ ([#1284](https://github.com/googleapis/python-bigquery/issues/1284)) ([52d9f14](https://github.com/googleapis/python-bigquery/commit/52d9f14fb1d183f64a62fee1fddc0bf576a0a3e9)) + + +### Documentation + +* **samples:** add table snapshot sample ([#1274](https://github.com/googleapis/python-bigquery/issues/1274)) ([e760d1b](https://github.com/googleapis/python-bigquery/commit/e760d1bcb76561b4247adde2fd06ae0b686befb9)) +* **samples:** explicitly add bq to samples reqs, upgrade grpc to fix bug on m1 ([#1290](https://github.com/googleapis/python-bigquery/issues/1290)) ([9b7e3e4](https://github.com/googleapis/python-bigquery/commit/9b7e3e424cbd08af8b08c91e6397a3f1b7811064)) + ## [3.2.0](https://github.com/googleapis/python-bigquery/compare/v3.1.0...v3.2.0) (2022-06-06) diff --git a/google/cloud/bigquery/version.py b/google/cloud/bigquery/version.py index c24ca23d6..2279c3674 100644 --- a/google/cloud/bigquery/version.py +++ b/google/cloud/bigquery/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "3.2.0" +__version__ = "3.3.0"