From 49cb478ddff59bb3647a4d75b05014cc00d0eebe Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Tue, 16 Nov 2021 03:19:06 -0500 Subject: [PATCH 01/14] chore: update doc links from googleapis.dev to cloud.google.com (#354) --- .repo-metadata.json | 2 +- README.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index d4d8049b..68063615 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -2,7 +2,7 @@ "name": "bigquerystorage", "name_pretty": "Google BigQuery Storage", "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage/", - "client_documentation": "https://googleapis.dev/python/bigquerystorage/latest", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerystorage/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", "release_level": "ga", "language": "python", diff --git a/README.rst b/README.rst index ecd5e44c..ff61077f 100644 --- a/README.rst +++ b/README.rst @@ -15,7 +15,7 @@ Python Client for BigQuery Storage API .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-storage.svg :target: https://pypi.org/project/google-cloud-bigquery-storage/ .. _BigQuery Storage API: https://cloud.google.com/bigquery/docs/reference/storage/ -.. _Client Library Documentation: https://googleapis.dev/python/bigquerystorage/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/bigquerystorage/latest .. 
_Product Documentation: https://cloud.google.com/bigquery/docs/reference/storage/ Quick Start From 3deb240a2ebfff477eab93bb053f658ade2511af Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 16 Nov 2021 18:19:06 +0100 Subject: [PATCH 02/14] chore(deps): update dependency google-cloud-bigquery-storage to v2.10.1 (#353) Co-authored-by: Anthonios Partheniou --- samples/quickstart/requirements.txt | 2 +- samples/snippets/requirements.txt | 2 +- samples/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/quickstart/requirements.txt b/samples/quickstart/requirements.txt index 8a2ac6e0..27d2f18a 100644 --- a/samples/quickstart/requirements.txt +++ b/samples/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.10.0 +google-cloud-bigquery-storage==2.10.1 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index eb4c3460..5c612ffc 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-bigquery-storage==2.10.0 +google-cloud-bigquery-storage==2.10.1 google-cloud-bigquery==2.30.1 protobuf==3.19.1 diff --git a/samples/to_dataframe/requirements.txt b/samples/to_dataframe/requirements.txt index 6bc141a6..3019630c 100644 --- a/samples/to_dataframe/requirements.txt +++ b/samples/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.3.3 -google-cloud-bigquery-storage==2.10.0 +google-cloud-bigquery-storage==2.10.1 google-cloud-bigquery==2.30.1 pyarrow==6.0.0 ipython==7.24.0; python_version > '3.6' From ce63994e08091be06b75d4010f2eefb424dbb356 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 18 Nov 2021 16:41:06 +0100 Subject: [PATCH 03/14] chore(deps): update dependency pyarrow to v6.0.1 (#357) --- samples/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/to_dataframe/requirements.txt b/samples/to_dataframe/requirements.txt index 3019630c..009a6864 100644 --- a/samples/to_dataframe/requirements.txt +++ b/samples/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==2.3.3 google-cloud-bigquery-storage==2.10.1 google-cloud-bigquery==2.30.1 -pyarrow==6.0.0 +pyarrow==6.0.1 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' pandas==1.2.5; python_version > '3.6' From 201512580d06cc7f581d35bbeb87e0562d140282 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 3 Dec 2021 11:54:01 +0100 Subject: [PATCH 04/14] chore(deps): update all dependencies (#358) --- samples/snippets/requirements-test.txt | 2 +- samples/snippets/requirements.txt | 2 +- samples/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 6bf5f5ed..383d41f0 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -google-cloud-testutils==1.2.0 +google-cloud-testutils==1.3.0 pytest==6.2.5 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 5c612ffc..6586c02e 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.10.1 -google-cloud-bigquery==2.30.1 +google-cloud-bigquery==2.31.0 protobuf==3.19.1 diff --git a/samples/to_dataframe/requirements.txt b/samples/to_dataframe/requirements.txt index 
009a6864..c077506c 100644 --- a/samples/to_dataframe/requirements.txt +++ b/samples/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.3.3 google-cloud-bigquery-storage==2.10.1 -google-cloud-bigquery==2.30.1 +google-cloud-bigquery==2.31.0 pyarrow==6.0.1 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' From aa9740d352b2359171a3a99811f88e24ae927189 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Dec 2021 10:19:38 -0600 Subject: [PATCH 05/14] feat: add `write_mode` property to BigQuery Storage Write API v1 (#360) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add write_mode support for BigQuery Storage Write API v1 Committer: @anahan PiperOrigin-RevId: 414771198 Source-Link: https://github.com/googleapis/googleapis/commit/8a2398e34424ba7368a5195882386b16cd1b076c Source-Link: https://github.com/googleapis/googleapis-gen/commit/9df7ea386b59d5cc397a392498d3c1a5e5a673cb Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWRmN2VhMzg2YjU5ZDVjYzM5N2EzOTI0OThkM2MxYTVlNWE2NzNjYiJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../cloud/bigquery_storage_v1/types/stream.py | 8 ++++++++ .../test_big_query_write.py | 20 +++++++++++++++---- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/google/cloud/bigquery_storage_v1/types/stream.py b/google/cloud/bigquery_storage_v1/types/stream.py index 0faff50f..aa527022 100644 --- a/google/cloud/bigquery_storage_v1/types/stream.py +++ b/google/cloud/bigquery_storage_v1/types/stream.py @@ -194,6 +194,8 @@ class WriteStream(proto.Message): generate data that's compatible with this schema to send in initial ``AppendRowsRequest``. The table schema could go out of date during the life time of the stream. + write_mode (google.cloud.bigquery_storage_v1.types.WriteStream.WriteMode): + Immutable. Mode of the stream. """ class Type(proto.Enum): @@ -203,11 +205,17 @@ class Type(proto.Enum): PENDING = 2 BUFFERED = 3 + class WriteMode(proto.Enum): + r"""Mode enum of the stream.""" + WRITE_MODE_UNSPECIFIED = 0 + INSERT = 1 + name = proto.Field(proto.STRING, number=1,) type_ = proto.Field(proto.ENUM, number=2, enum=Type,) create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) commit_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) table_schema = proto.Field(proto.MESSAGE, number=5, message=gcbs_table.TableSchema,) + write_mode = proto.Field(proto.ENUM, number=7, enum=WriteMode,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py index 76bdfd8e..6d1f1d43 100644 --- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py @@ -504,7 +504,9 @@ def test_create_write_stream( ) as call: # Designate an appropriate return value for the call. 
call.return_value = stream.WriteStream( - name="name_value", type_=stream.WriteStream.Type.COMMITTED, + name="name_value", + type_=stream.WriteStream.Type.COMMITTED, + write_mode=stream.WriteStream.WriteMode.INSERT, ) response = client.create_write_stream(request) @@ -517,6 +519,7 @@ def test_create_write_stream( assert isinstance(response, stream.WriteStream) assert response.name == "name_value" assert response.type_ == stream.WriteStream.Type.COMMITTED + assert response.write_mode == stream.WriteStream.WriteMode.INSERT def test_create_write_stream_from_dict(): @@ -559,7 +562,9 @@ async def test_create_write_stream_async( # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( stream.WriteStream( - name="name_value", type_=stream.WriteStream.Type.COMMITTED, + name="name_value", + type_=stream.WriteStream.Type.COMMITTED, + write_mode=stream.WriteStream.WriteMode.INSERT, ) ) response = await client.create_write_stream(request) @@ -573,6 +578,7 @@ async def test_create_write_stream_async( assert isinstance(response, stream.WriteStream) assert response.name == "name_value" assert response.type_ == stream.WriteStream.Type.COMMITTED + assert response.write_mode == stream.WriteStream.WriteMode.INSERT @pytest.mark.asyncio @@ -805,7 +811,9 @@ def test_get_write_stream( with mock.patch.object(type(client.transport.get_write_stream), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = stream.WriteStream( - name="name_value", type_=stream.WriteStream.Type.COMMITTED, + name="name_value", + type_=stream.WriteStream.Type.COMMITTED, + write_mode=stream.WriteStream.WriteMode.INSERT, ) response = client.get_write_stream(request) @@ -818,6 +826,7 @@ def test_get_write_stream( assert isinstance(response, stream.WriteStream) assert response.name == "name_value" assert response.type_ == stream.WriteStream.Type.COMMITTED + assert response.write_mode == stream.WriteStream.WriteMode.INSERT def test_get_write_stream_from_dict(): @@ -856,7 +865,9 @@ async def test_get_write_stream_async( # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( stream.WriteStream( - name="name_value", type_=stream.WriteStream.Type.COMMITTED, + name="name_value", + type_=stream.WriteStream.Type.COMMITTED, + write_mode=stream.WriteStream.WriteMode.INSERT, ) ) response = await client.get_write_stream(request) @@ -870,6 +881,7 @@ async def test_get_write_stream_async( assert isinstance(response, stream.WriteStream) assert response.name == "name_value" assert response.type_ == stream.WriteStream.Type.COMMITTED + assert response.write_mode == stream.WriteStream.WriteMode.INSERT @pytest.mark.asyncio From 3e90efb8eb56b3e3848c2560feef005189b6536c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 9 Dec 2021 15:03:12 -0800 Subject: [PATCH 06/14] chore: update python-docs-samples link to main branch (#362) Source-Link: https://github.com/googleapis/synthtool/commit/0941ef32b18aff0be34a40404f3971d9f51996e9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2f90537dd7df70f6b663cd654b1fa5dee483cf6a4edcfd46072b2775be8a23ec Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- samples/AUTHORING_GUIDE.md | 2 +- samples/CONTRIBUTING.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7519fa3a..0b3c8cd9 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 + digest: sha256:2f90537dd7df70f6b663cd654b1fa5dee483cf6a4edcfd46072b2775be8a23ec diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md index 55c97b32..8249522f 100644 --- a/samples/AUTHORING_GUIDE.md +++ b/samples/AUTHORING_GUIDE.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md index 34c882b6..f5fe2e6b 100644 --- a/samples/CONTRIBUTING.md +++ b/samples/CONTRIBUTING.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md \ No newline at end of file From 03966048f4a03f8f0a10009147076043bb9ea371 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 28 Dec 2021 13:21:36 -0500 Subject: [PATCH 07/14] chore: update .repo-metadata.json (#365) --- .repo-metadata.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index 68063615..59393f11 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -4,7 +4,7 @@ "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage/", "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerystorage/latest", "issue_tracker": 
"https://issuetracker.google.com/savedsearches/559654", - "release_level": "ga", + "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", "repo": "googleapis/python-bigquery-storage", @@ -12,5 +12,6 @@ "api_id": "bigquerystorage.googleapis.com", "requires_billing": true, "default_version": "v1", - "codeowner_team": "@googleapis/api-bigquery" + "codeowner_team": "@googleapis/api-bigquery", + "api_shortname": "bigquerystorage" } From ed634f1ae5013bd44b8f070c57a12319dd3f9c13 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jan 2022 17:40:35 +0000 Subject: [PATCH 08/14] chore: use python-samples-reviewers (#368) --- .github/.OwlBot.lock.yaml | 2 +- .github/CODEOWNERS | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 0b3c8cd9..f33299dd 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2f90537dd7df70f6b663cd654b1fa5dee483cf6a4edcfd46072b2775be8a23ec + digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f8714a3e..193b4363 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -8,5 +8,5 @@ # @googleapis/yoshi-python @googleapis/api-bigquery are the default owners for changes in this repo * @googleapis/yoshi-python @googleapis/api-bigquery -# @googleapis/python-samples-owners @googleapis/api-bigquery are the default owners for samples changes -/samples/ @googleapis/python-samples-owners @googleapis/api-bigquery +# @googleapis/python-samples-reviewers @googleapis/api-bigquery are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-bigquery From eec9d82e89cfa30da7607e2f20c98e473c1a08fe Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 6 Jan 2022 21:18:15 +0100 Subject: [PATCH 09/14] chore(deps): update dependency google-cloud-testutils to v1.3.1 (#359) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-testutils](https://togithub.com/googleapis/python-test-utils) | `==1.3.0` -> `==1.3.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-testutils/1.3.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-testutils/1.3.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-testutils/1.3.1/compatibility-slim/1.3.0)](https://docs.renovatebot.com/merge-confidence/) | 
[![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-testutils/1.3.1/confidence-slim/1.3.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-test-utils ### [`v1.3.1`](https://togithub.com/googleapis/python-test-utils/blob/master/CHANGELOG.md#131-httpswwwgithubcomgoogleapispython-test-utilscomparev130v131-2021-12-07) [Compare Source](https://togithub.com/googleapis/python-test-utils/compare/v1.3.0...v1.3.1)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 383d41f0..48472e00 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -google-cloud-testutils==1.3.0 +google-cloud-testutils==1.3.1 pytest==6.2.5 From bbc93d5321b476ff4776fa806d3cdb24e01b4dc7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 7 Jan 2022 13:52:19 -0500 Subject: [PATCH 10/14] chore: use gapic-generator-python 0.58.4 (#367) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.58.4 fix: provide appropriate mock values for message body fields committer: dovs PiperOrigin-RevId: 419025932 Source-Link: https://github.com/googleapis/googleapis/commit/73da6697f598f1ba30618924936a59f8e457ec89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/46df624a54b9ed47c1a7eefb7a49413cf7b82f98 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDZkZjYyNGE1NGI5ZWQ0N2MxYTdlZWZiN2E0OTQxM2NmN2I4MmY5OCJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../big_query_read/transports/base.py | 1 - .../big_query_write/transports/base.py | 1 - .../big_query_read/transports/base.py | 1 - .../big_query_write/transports/base.py | 1 - .../test_big_query_read.py | 39 ++++------- .../test_big_query_write.py | 66 ++++++------------- .../test_big_query_read.py | 39 ++++------- .../test_big_query_write.py | 66 ++++++------------- 8 files changed, 68 insertions(+), 146 deletions(-) diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py index 4d1226fe..40e60cea 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py @@ -106,7 +106,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py index ed38b903..33a26023 100644 --- 
a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py @@ -106,7 +106,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py index 7f9cbc31..deac9b75 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py @@ -106,7 +106,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py index 57ac83d3..4d273a29 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py @@ -106,7 +106,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py index 35f1069c..e22cc909 100644 --- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py @@ -242,20 +242,20 @@ def test_big_query_read_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -312,7 +312,7 @@ def test_big_query_read_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -407,7 +407,7 @@ def test_big_query_read_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -438,7 +438,7 @@ def test_big_query_read_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -469,9 +469,8 @@ def test_big_query_read_client_client_options_from_dict(): ) -def test_create_read_session( - transport: str = "grpc", request_type=storage.CreateReadSessionRequest -): +@pytest.mark.parametrize("request_type", [storage.CreateReadSessionRequest, dict,]) +def test_create_read_session(request_type, transport: str = "grpc"): client = BigQueryReadClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -507,10 +506,6 @@ def test_create_read_session( assert response.estimated_total_bytes_scanned == 3076 -def test_create_read_session_from_dict(): - test_create_read_session(request_type=dict) - - def test_create_read_session_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -728,7 +723,8 @@ async def test_create_read_session_flattened_error_async(): ) -def test_read_rows(transport: str = "grpc", request_type=storage.ReadRowsRequest): +@pytest.mark.parametrize("request_type", [storage.ReadRowsRequest, dict,]) +def test_read_rows(request_type, transport: str = "grpc"): client = BigQueryReadClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -753,10 +749,6 @@ def test_read_rows(transport: str = "grpc", request_type=storage.ReadRowsRequest assert isinstance(message, storage.ReadRowsResponse) -def test_read_rows_from_dict(): - test_read_rows(request_type=dict) - - def test_read_rows_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. @@ -934,9 +926,8 @@ async def test_read_rows_flattened_error_async(): ) -def test_split_read_stream( - transport: str = "grpc", request_type=storage.SplitReadStreamRequest -): +@pytest.mark.parametrize("request_type", [storage.SplitReadStreamRequest, dict,]) +def test_split_read_stream(request_type, transport: str = "grpc"): client = BigQueryReadClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -962,10 +953,6 @@ def test_split_read_stream( assert isinstance(response, storage.SplitReadStreamResponse) -def test_split_read_stream_from_dict(): - test_split_read_stream(request_type=dict) - - def test_split_read_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1645,7 +1632,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py index 6d1f1d43..c63b1eac 100644 --- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py @@ -256,20 +256,20 @@ def test_big_query_write_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -328,7 +328,7 @@ def test_big_query_write_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -423,7 +423,7 @@ def test_big_query_write_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -454,7 +454,7 @@ def test_big_query_write_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, 
client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -487,9 +487,8 @@ def test_big_query_write_client_client_options_from_dict(): ) -def test_create_write_stream( - transport: str = "grpc", request_type=storage.CreateWriteStreamRequest -): +@pytest.mark.parametrize("request_type", [storage.CreateWriteStreamRequest, dict,]) +def test_create_write_stream(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -522,10 +521,6 @@ def test_create_write_stream( assert response.write_mode == stream.WriteStream.WriteMode.INSERT -def test_create_write_stream_from_dict(): - test_create_write_stream(request_type=dict) - - def test_create_write_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -729,7 +724,8 @@ async def test_create_write_stream_flattened_error_async(): ) -def test_append_rows(transport: str = "grpc", request_type=storage.AppendRowsRequest): +@pytest.mark.parametrize("request_type", [storage.AppendRowsRequest, dict,]) +def test_append_rows(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -755,10 +751,6 @@ def test_append_rows(transport: str = "grpc", request_type=storage.AppendRowsReq assert isinstance(message, storage.AppendRowsResponse) -def test_append_rows_from_dict(): - test_append_rows(request_type=dict) - - @pytest.mark.asyncio async def test_append_rows_async( transport: str = "grpc_asyncio", request_type=storage.AppendRowsRequest @@ -796,9 +788,8 @@ async def test_append_rows_async_from_dict(): await test_append_rows_async(request_type=dict) -def test_get_write_stream( - transport: str = "grpc", request_type=storage.GetWriteStreamRequest -): +@pytest.mark.parametrize("request_type", [storage.GetWriteStreamRequest, dict,]) +def test_get_write_stream(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -829,10 +820,6 @@ def test_get_write_stream( assert response.write_mode == stream.WriteStream.WriteMode.INSERT -def test_get_write_stream_from_dict(): - test_get_write_stream(request_type=dict) - - def test_get_write_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1010,9 +997,8 @@ async def test_get_write_stream_flattened_error_async(): ) -def test_finalize_write_stream( - transport: str = "grpc", request_type=storage.FinalizeWriteStreamRequest -): +@pytest.mark.parametrize("request_type", [storage.FinalizeWriteStreamRequest, dict,]) +def test_finalize_write_stream(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1039,10 +1025,6 @@ def test_finalize_write_stream( assert response.row_count == 992 -def test_finalize_write_stream_from_dict(): - test_finalize_write_stream(request_type=dict) - - def test_finalize_write_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
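
Note: the `write_mode` assertions in these tests exercise the `WriteStream.WriteMode` enum introduced by patch 05. A minimal usage sketch, assuming a default GAPIC client; the project, dataset, and table identifiers below are placeholders, not from this repo:

```python
# Sketch only: inspect the write_mode field added to WriteStream in patch 05.
from google.cloud import bigquery_storage_v1
from google.cloud.bigquery_storage_v1 import types

client = bigquery_storage_v1.BigQueryWriteClient()
stream = client.create_write_stream(
    parent="projects/my-project/datasets/my_dataset/tables/my_table",  # placeholder
    write_stream=types.WriteStream(type_=types.WriteStream.Type.PENDING),
)
# write_mode is immutable and server-assigned; INSERT is the only
# non-default value this patch defines.
print(stream.write_mode == types.WriteStream.WriteMode.INSERT)
```
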
@@ -1230,9 +1212,10 @@ async def test_finalize_write_stream_flattened_error_async(): ) -def test_batch_commit_write_streams( - transport: str = "grpc", request_type=storage.BatchCommitWriteStreamsRequest -): +@pytest.mark.parametrize( + "request_type", [storage.BatchCommitWriteStreamsRequest, dict,] +) +def test_batch_commit_write_streams(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1258,10 +1241,6 @@ def test_batch_commit_write_streams( assert isinstance(response, storage.BatchCommitWriteStreamsResponse) -def test_batch_commit_write_streams_from_dict(): - test_batch_commit_write_streams(request_type=dict) - - def test_batch_commit_write_streams_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1448,7 +1427,8 @@ async def test_batch_commit_write_streams_flattened_error_async(): ) -def test_flush_rows(transport: str = "grpc", request_type=storage.FlushRowsRequest): +@pytest.mark.parametrize("request_type", [storage.FlushRowsRequest, dict,]) +def test_flush_rows(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1473,10 +1453,6 @@ def test_flush_rows(transport: str = "grpc", request_type=storage.FlushRowsReque assert response.offset == 647 -def test_flush_rows_from_dict(): - test_flush_rows(request_type=dict) - - def test_flush_rows_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2217,7 +2193,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py index 3d9ae125..fca4bb24 100644 --- a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py +++ b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py @@ -244,20 +244,20 @@ def test_big_query_read_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -314,7 +314,7 @@ def test_big_query_read_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -409,7 +409,7 @@ def test_big_query_read_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -440,7 +440,7 @@ def test_big_query_read_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -471,9 +471,8 @@ def test_big_query_read_client_client_options_from_dict(): ) -def test_create_read_session( - transport: str = "grpc", request_type=storage.CreateReadSessionRequest -): +@pytest.mark.parametrize("request_type", [storage.CreateReadSessionRequest, dict,]) +def test_create_read_session(request_type, transport: str = "grpc"): client = BigQueryReadClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -507,10 +506,6 @@ def test_create_read_session( assert response.table == "table_value" -def test_create_read_session_from_dict(): - test_create_read_session(request_type=dict) - - def test_create_read_session_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -726,7 +721,8 @@ async def test_create_read_session_flattened_error_async(): ) -def test_read_rows(transport: str = "grpc", request_type=storage.ReadRowsRequest): +@pytest.mark.parametrize("request_type", [storage.ReadRowsRequest, dict,]) +def test_read_rows(request_type, transport: str = "grpc"): client = BigQueryReadClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -751,10 +747,6 @@ def test_read_rows(transport: str = "grpc", request_type=storage.ReadRowsRequest assert isinstance(message, storage.ReadRowsResponse) -def test_read_rows_from_dict(): - test_read_rows(request_type=dict) - - def test_read_rows_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
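
The mechanical change patch 10 repeats across all four test modules folds each `test_x` / `test_x_from_dict` pair into a single parametrized test. A self-contained sketch of the pattern; the `EchoRequest` and `handle` names are hypothetical stand-ins, not from this repo:

```python
import pytest

class EchoRequest:
    """Hypothetical stand-in for a GAPIC request message."""

def handle(request):
    # GAPIC methods accept either the proto message or a plain dict and
    # normalize both into the request type.
    return EchoRequest() if isinstance(request, dict) else request

# Before: test_handle() plus a separate test_handle_from_dict() wrapper.
# After: one test parametrized over both request representations.
@pytest.mark.parametrize("request_type", [EchoRequest, dict])
def test_handle(request_type):
    assert isinstance(handle(request_type()), EchoRequest)
```
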
@@ -932,9 +924,8 @@ async def test_read_rows_flattened_error_async(): ) -def test_split_read_stream( - transport: str = "grpc", request_type=storage.SplitReadStreamRequest -): +@pytest.mark.parametrize("request_type", [storage.SplitReadStreamRequest, dict,]) +def test_split_read_stream(request_type, transport: str = "grpc"): client = BigQueryReadClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -960,10 +951,6 @@ def test_split_read_stream( assert isinstance(response, storage.SplitReadStreamResponse) -def test_split_read_stream_from_dict(): - test_split_read_stream(request_type=dict) - - def test_split_read_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1643,7 +1630,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py index 5e5e9450..d58b5558 100644 --- a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py +++ b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py @@ -256,20 +256,20 @@ def test_big_query_write_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -328,7 +328,7 @@ def test_big_query_write_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -423,7 +423,7 @@ def test_big_query_write_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -454,7 +454,7 @@ def test_big_query_write_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = 
client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -487,9 +487,8 @@ def test_big_query_write_client_client_options_from_dict(): ) -def test_create_write_stream( - transport: str = "grpc", request_type=storage.CreateWriteStreamRequest -): +@pytest.mark.parametrize("request_type", [storage.CreateWriteStreamRequest, dict,]) +def test_create_write_stream(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -519,10 +518,6 @@ def test_create_write_stream( assert response.type_ == stream.WriteStream.Type.COMMITTED -def test_create_write_stream_from_dict(): - test_create_write_stream(request_type=dict) - - def test_create_write_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -723,7 +718,8 @@ async def test_create_write_stream_flattened_error_async(): ) -def test_append_rows(transport: str = "grpc", request_type=storage.AppendRowsRequest): +@pytest.mark.parametrize("request_type", [storage.AppendRowsRequest, dict,]) +def test_append_rows(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -749,10 +745,6 @@ def test_append_rows(transport: str = "grpc", request_type=storage.AppendRowsReq assert isinstance(message, storage.AppendRowsResponse) -def test_append_rows_from_dict(): - test_append_rows(request_type=dict) - - @pytest.mark.asyncio async def test_append_rows_async( transport: str = "grpc_asyncio", request_type=storage.AppendRowsRequest @@ -790,9 +782,8 @@ async def test_append_rows_async_from_dict(): await test_append_rows_async(request_type=dict) -def test_get_write_stream( - transport: str = "grpc", request_type=storage.GetWriteStreamRequest -): +@pytest.mark.parametrize("request_type", [storage.GetWriteStreamRequest, dict,]) +def test_get_write_stream(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -820,10 +811,6 @@ def test_get_write_stream( assert response.type_ == stream.WriteStream.Type.COMMITTED -def test_get_write_stream_from_dict(): - test_get_write_stream(request_type=dict) - - def test_get_write_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -998,9 +985,8 @@ async def test_get_write_stream_flattened_error_async(): ) -def test_finalize_write_stream( - transport: str = "grpc", request_type=storage.FinalizeWriteStreamRequest -): +@pytest.mark.parametrize("request_type", [storage.FinalizeWriteStreamRequest, dict,]) +def test_finalize_write_stream(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1027,10 +1013,6 @@ def test_finalize_write_stream( assert response.row_count == 992 -def test_finalize_write_stream_from_dict(): - test_finalize_write_stream(request_type=dict) - - def test_finalize_write_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1218,9 +1200,10 @@ async def test_finalize_write_stream_flattened_error_async(): ) -def test_batch_commit_write_streams( - transport: str = "grpc", request_type=storage.BatchCommitWriteStreamsRequest -): +@pytest.mark.parametrize( + "request_type", [storage.BatchCommitWriteStreamsRequest, dict,] +) +def test_batch_commit_write_streams(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1246,10 +1229,6 @@ def test_batch_commit_write_streams( assert isinstance(response, storage.BatchCommitWriteStreamsResponse) -def test_batch_commit_write_streams_from_dict(): - test_batch_commit_write_streams(request_type=dict) - - def test_batch_commit_write_streams_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1436,7 +1415,8 @@ async def test_batch_commit_write_streams_flattened_error_async(): ) -def test_flush_rows(transport: str = "grpc", request_type=storage.FlushRowsRequest): +@pytest.mark.parametrize("request_type", [storage.FlushRowsRequest, dict,]) +def test_flush_rows(request_type, transport: str = "grpc"): client = BigQueryWriteClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1461,10 +1441,6 @@ def test_flush_rows(transport: str = "grpc", request_type=storage.FlushRowsReque assert response.offset == 647 -def test_flush_rows_from_dict(): - test_flush_rows(request_type=dict) - - def test_flush_rows_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2205,7 +2181,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( From fd454e6c60f410b1bd4fbc37bda3bcbcb708538b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Jan 2022 07:41:06 -0500 Subject: [PATCH 11/14] chore(samples): Add check for tests in directory (#377) Source-Link: https://github.com/googleapis/synthtool/commit/52aef91f8d25223d9dbdb4aebd94ba8eea2101f3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- samples/quickstart/noxfile.py | 70 ++++++++++++++++++--------------- samples/snippets/noxfile.py | 70 ++++++++++++++++++--------------- samples/to_dataframe/noxfile.py | 70 ++++++++++++++++++--------------- 4 files changed, 118 insertions(+), 94 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f33299dd..6b8a73b3 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 + digest: sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 diff --git a/samples/quickstart/noxfile.py b/samples/quickstart/noxfile.py index 93a9122c..3bbef5d5 100644 --- a/samples/quickstart/noxfile.py +++ b/samples/quickstart/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 
+185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 93a9122c..3bbef5d5 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/to_dataframe/noxfile.py b/samples/to_dataframe/noxfile.py index 93a9122c..3bbef5d5 100644 --- a/samples/to_dataframe/noxfile.py +++ b/samples/to_dataframe/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From 675d7cf2f8978cbdbebf962d8211a8c0a0f7d1d0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 12 Jan 2022 16:18:00 -0600 Subject: [PATCH 12/14] build: switch to release-please for tagging (#379) Source-Link: https://github.com/googleapis/synthtool/commit/f8077d237e0df2cb0066dfc6e09fc41e1c59646a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .github/release-please.yml | 1 + .github/release-trigger.yml | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 .github/release-trigger.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 6b8a73b3..ff5126c1 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 + digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 diff --git a/.github/release-please.yml b/.github/release-please.yml index 4507ad05..466597e5 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 00000000..d4ca9418 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true From 33757d88c968fef65332f5ebe0b876758f978ab0 Mon Sep 17 00:00:00 2001 From: esert-g <48071655+esert-g@users.noreply.github.com> Date: Wed, 12 Jan 2022 15:01:03 -0800 Subject: [PATCH 13/14] feat: retryable resource exhausted handling (#366) BigQuery Storage Read API will start returning retryable RESOURCE_EXHAUSTED errors in 2022 when certain concurrency limits are hit, so this PR adds some code to handle them. Tested with unit tests and system tests. System tests ran successfully on a test project that intentionally returns retryable RESOURCE_EXHAUSTED errors. Co-authored-by: Tim Swast --- google/cloud/bigquery_storage_v1/client.py | 20 ++- google/cloud/bigquery_storage_v1/reader.py | 72 ++++++-- .../cloud/bigquery_storage_v1beta2/client.py | 20 ++- tests/unit/test_reader_v1.py | 164 +++++++++++++----- tests/unit/test_reader_v1_arrow.py | 47 ++--- 5 files changed, 234 insertions(+), 89 deletions(-) diff --git a/google/cloud/bigquery_storage_v1/client.py b/google/cloud/bigquery_storage_v1/client.py index 75ef3834..05f91ae9 100644 --- a/google/cloud/bigquery_storage_v1/client.py +++ b/google/cloud/bigquery_storage_v1/client.py @@ -47,6 +47,7 @@ def read_rows( retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=(), + retry_delay_callback=None, ): """ Reads rows from the table in the format prescribed by the read @@ -108,6 +109,12 @@ def read_rows( specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. 
+ retry_delay_callback (Optional[Callable[[float], None]]): + If the client receives a retryable error that asks the client to + delay its next attempt and retry_delay_callback is not None, + BigQueryReadClient will call retry_delay_callback with the delay + duration (in seconds) before it starts sleeping until the next + attempt. Returns: ~google.cloud.bigquery_storage_v1.reader.ReadRowsStream: @@ -122,20 +129,15 @@ def read_rows( ValueError: If the parameters are invalid. """ gapic_client = super(BigQueryReadClient, self) - stream = gapic_client.read_rows( - read_stream=name, - offset=offset, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - return reader.ReadRowsStream( - stream, + stream = reader.ReadRowsStream( gapic_client, name, offset, {"retry": retry, "timeout": timeout, "metadata": metadata}, + retry_delay_callback=retry_delay_callback, ) + stream._reconnect() + return stream class BigQueryWriteClient(big_query_write.BigQueryWriteClient): diff --git a/google/cloud/bigquery_storage_v1/reader.py b/google/cloud/bigquery_storage_v1/reader.py index beb1dbb5..2e387eb2 100644 --- a/google/cloud/bigquery_storage_v1/reader.py +++ b/google/cloud/bigquery_storage_v1/reader.py @@ -17,12 +17,14 @@ import collections import io import json +import time try: import fastavro except ImportError: # pragma: NO COVER fastavro = None import google.api_core.exceptions +import google.rpc.error_details_pb2 try: import pandas @@ -79,16 +81,17 @@ class ReadRowsStream(object): If the pandas and fastavro libraries are installed, use the :func:`~google.cloud.bigquery_storage_v1.reader.ReadRowsStream.to_dataframe()` method to parse all messages into a :class:`pandas.DataFrame`. + + This object should not be created directly, but is returned by + other methods in this library. """ - def __init__(self, wrapped, client, name, offset, read_rows_kwargs): + def __init__( + self, client, name, offset, read_rows_kwargs, retry_delay_callback=None + ): """Construct a ReadRowsStream. Args: - wrapped (Iterable[ \ - ~google.cloud.bigquery_storage.types.ReadRowsResponse \ - ]): - The ReadRows stream to read. client ( \ ~google.cloud.bigquery_storage_v1.services. \ big_query_read.BigQueryReadClient \ @@ -106,6 +109,12 @@ def __init__(self, wrapped, client, name, offset, read_rows_kwargs): read_rows_kwargs (dict): Keyword arguments to use when reconnecting to a ReadRows stream. + retry_delay_callback (Optional[Callable[[float], None]]): + If the client receives a retryable error that asks the client to + delay its next attempt and retry_delay_callback is not None, + ReadRowsStream will call retry_delay_callback with the delay + duration (in seconds) before it starts sleeping until the next + attempt. Returns: Iterable[ \ @@ -116,11 +125,12 @@ def __init__(self, wrapped, client, name, offset, read_rows_kwargs): # Make a copy of the read position so that we can update it without # mutating the original input. - self._wrapped = wrapped self._client = client self._name = name self._offset = offset self._read_rows_kwargs = read_rows_kwargs + self._retry_delay_callback = retry_delay_callback + self._wrapped = None def __iter__(self): """An iterable of messages. @@ -131,9 +141,12 @@ def __iter__(self): ]: A sequence of row messages. """ - # Infinite loop to reconnect on reconnectable errors while processing # the row stream. 
+ + if self._wrapped is None: + self._reconnect() + while True: try: for message in self._wrapped: @@ -152,14 +165,53 @@ def __iter__(self): except _STREAM_RESUMPTION_EXCEPTIONS: # Transient error, so reconnect to the stream. pass + except Exception as exc: + if not self._resource_exhausted_exception_is_retryable(exc): + raise self._reconnect() def _reconnect(self): """Reconnect to the ReadRows stream using the most recent offset.""" - self._wrapped = self._client.read_rows( - read_stream=self._name, offset=self._offset, **self._read_rows_kwargs - ) + while True: + try: + self._wrapped = self._client.read_rows( + read_stream=self._name, + offset=self._offset, + **self._read_rows_kwargs + ) + break + except Exception as exc: + if not self._resource_exhausted_exception_is_retryable(exc): + raise + + def _resource_exhausted_exception_is_retryable(self, exc): + if isinstance(exc, google.api_core.exceptions.ResourceExhausted): + # ResourceExhausted errors are only retried if a valid + # RetryInfo is provided with the error. + # + # TODO: Remove hasattr logic when we require google-api-core >= 2.2.0. + # ResourceExhausted added details/_details in google-api-core 2.2.0. + details = None + if hasattr(exc, "details"): + details = exc.details + elif hasattr(exc, "_details"): + details = exc._details + if details is not None: + for detail in details: + if isinstance(detail, google.rpc.error_details_pb2.RetryInfo): + retry_delay = detail.retry_delay + if retry_delay is not None: + delay = max( + 0, + float(retry_delay.seconds) + + (float(retry_delay.nanos) / 1e9), + ) + if self._retry_delay_callback: + self._retry_delay_callback(delay) + time.sleep(delay) + return True + return False def rows(self, read_session=None): """Iterate over all rows in the stream. diff --git a/google/cloud/bigquery_storage_v1beta2/client.py b/google/cloud/bigquery_storage_v1beta2/client.py index 00bff3ff..0dc428b9 100644 --- a/google/cloud/bigquery_storage_v1beta2/client.py +++ b/google/cloud/bigquery_storage_v1beta2/client.py @@ -48,6 +48,7 @@ def read_rows( retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=(), + retry_delay_callback=None, ): """ Reads rows from the table in the format prescribed by the read @@ -109,6 +110,12 @@ def read_rows( specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. + retry_delay_callback (Optional[Callable[[float], None]]): + If the client receives a retryable error that asks the client to + delay its next attempt and retry_delay_callback is not None, + BigQueryReadClient will call retry_delay_callback with the delay + duration (in seconds) before it starts sleeping until the next + attempt. Returns: ~google.cloud.bigquery_storage_v1.reader.ReadRowsStream: @@ -123,20 +130,15 @@ def read_rows( ValueError: If the parameters are invalid. 
""" gapic_client = super(BigQueryReadClient, self) - stream = gapic_client.read_rows( - read_stream=name, - offset=offset, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - return reader.ReadRowsStream( - stream, + stream = reader.ReadRowsStream( gapic_client, name, offset, {"retry": retry, "timeout": timeout, "metadata": metadata}, + retry_delay_callback=retry_delay_callback, ) + stream._reconnect() + return stream class BigQueryWriteClient(big_query_write.BigQueryWriteClient): diff --git a/tests/unit/test_reader_v1.py b/tests/unit/test_reader_v1.py index 59292843..826e8ea7 100644 --- a/tests/unit/test_reader_v1.py +++ b/tests/unit/test_reader_v1.py @@ -27,6 +27,7 @@ import google.api_core.exceptions from google.cloud.bigquery_storage import types from .helpers import SCALAR_COLUMNS, SCALAR_COLUMN_NAMES, SCALAR_BLOCKS +import google.rpc.error_details_pb2 PROJECT = "my-project" @@ -97,6 +98,29 @@ def _pages_w_resumable_internal_error(avro_blocks): ) +def _pages_w_nonresumable_resource_exhausted_error(avro_blocks): + for block in avro_blocks: + yield block + raise google.api_core.exceptions.ResourceExhausted( + "RESOURCE_EXHAUSTED: do not retry" + ) + + +def _pages_w_resumable_resource_exhausted_error( + avro_blocks, delay_seconds, delay_nanos +): + for block in avro_blocks: + yield block + retry_info = google.rpc.error_details_pb2.RetryInfo() + retry_info.retry_delay.seconds = delay_seconds + retry_info.retry_delay.nanos = delay_nanos + error = google.api_core.exceptions.ResourceExhausted( + "RESOURCE_EXHAUSTED: retry later" + ) + error._details = (retry_info,) + raise error + + def _pages_w_unavailable(pages): for page in pages: yield page @@ -144,7 +168,8 @@ def test_avro_rows_raises_import_error( monkeypatch.setattr(mut, "fastavro", None) avro_schema = _bq_to_avro_schema(SCALAR_COLUMNS) avro_blocks = _bq_to_avro_blocks(SCALAR_BLOCKS, avro_schema) - reader = class_under_test(avro_blocks, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "", 0, {}) rows = iter(reader.rows()) # Since session isn't passed in, reader doesn't know serialization type @@ -159,7 +184,8 @@ def test_rows_no_schema_set_raises_type_error( avro_schema = _bq_to_avro_schema(SCALAR_COLUMNS) avro_blocks = _bq_to_avro_blocks(SCALAR_BLOCKS, avro_schema) avro_blocks[0].avro_schema = None - reader = class_under_test(avro_blocks, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "", 0, {}) rows = iter(reader.rows()) # Since session isn't passed in, reader doesn't know serialization type @@ -169,7 +195,8 @@ def test_rows_no_schema_set_raises_type_error( def test_rows_w_empty_stream(class_under_test, mock_gapic_client): - reader = class_under_test([], mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = [] + reader = class_under_test(mock_gapic_client, "", 0, {}) got = reader.rows() assert tuple(got) == () @@ -177,8 +204,8 @@ def test_rows_w_empty_stream(class_under_test, mock_gapic_client): def test_rows_w_scalars(class_under_test, mock_gapic_client): avro_schema = _bq_to_avro_schema(SCALAR_COLUMNS) avro_blocks = _bq_to_avro_blocks(SCALAR_BLOCKS, avro_schema) - - reader = class_under_test(avro_blocks, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "", 0, {}) got = tuple(reader.rows()) expected = 
tuple(itertools.chain.from_iterable(SCALAR_BLOCKS)) @@ -198,22 +225,22 @@ def test_rows_w_timeout(class_under_test, mock_gapic_client): bq_blocks_2 = [[{"int_col": 567}, {"int_col": 789}], [{"int_col": 890}]] avro_blocks_2 = _bq_to_avro_blocks(bq_blocks_2, avro_schema) - mock_gapic_client.read_rows.return_value = avro_blocks_2 + mock_gapic_client.read_rows.side_effect = ( + avro_blocks_1, + avro_blocks_2, + ) reader = class_under_test( - avro_blocks_1, - mock_gapic_client, - "teststream", - 0, - {"metadata": {"test-key": "test-value"}}, + mock_gapic_client, "teststream", 0, {"metadata": {"test-key": "test-value"}}, ) with pytest.raises(google.api_core.exceptions.DeadlineExceeded): list(reader.rows()) - # Don't reconnect on DeadlineException. This allows user-specified timeouts - # to be respected. - mock_gapic_client.read_rows.assert_not_called() + # Don't reconnect on DeadlineException so user-specified timeouts + # are respected. This requires client.read_rows to be called + # exactly once which fails with DeadlineException. + mock_gapic_client.read_rows.assert_called_once() def test_rows_w_nonresumable_internal_error(class_under_test, mock_gapic_client): @@ -223,15 +250,43 @@ def test_rows_w_nonresumable_internal_error(class_under_test, mock_gapic_client) avro_blocks = _pages_w_nonresumable_internal_error( _bq_to_avro_blocks(bq_blocks, avro_schema) ) - - reader = class_under_test(avro_blocks, mock_gapic_client, "teststream", 0, {}) + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "teststream", 0, {}) with pytest.raises( google.api_core.exceptions.InternalServerError, match="nonresumable error" ): list(reader.rows()) - mock_gapic_client.read_rows.assert_not_called() + mock_gapic_client.read_rows.assert_called_once() + + +def test_rows_w_nonresumable_resource_exhausted_error( + class_under_test, mock_gapic_client +): + bq_columns = [{"name": "int_col", "type": "int64"}] + avro_schema = _bq_to_avro_schema(bq_columns) + bq_blocks = [[{"int_col": 1024}, {"int_col": 512}], [{"int_col": 256}]] + avro_blocks = _pages_w_nonresumable_resource_exhausted_error( + _bq_to_avro_blocks(bq_blocks, avro_schema) + ) + + retry_delay_num_calls = 0 + + def retry_delay_callback(delay): + nonlocal retry_delay_num_calls + retry_delay_num_calls += 1 + + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "teststream", 0, {}) + + with pytest.raises( + google.api_core.exceptions.ResourceExhausted, match="do not retry" + ): + list(reader.rows()) + + mock_gapic_client.read_rows.assert_called_once() + assert retry_delay_num_calls == 0 def test_rows_w_reconnect(class_under_test, mock_gapic_client): @@ -249,20 +304,37 @@ def test_rows_w_reconnect(class_under_test, mock_gapic_client): bq_blocks_3 = [[{"int_col": -1}, {"int_col": -2}], [{"int_col": -4}]] avro_blocks_3 = _pages_w_unknown(_bq_to_avro_blocks(bq_blocks_3, avro_schema)) bq_blocks_4 = [[{"int_col": 567}, {"int_col": 789}], [{"int_col": 890}]] - avro_blocks_4 = _bq_to_avro_blocks(bq_blocks_4, avro_schema) + delay_seconds = 1 + delay_nanos = 234 + avro_blocks_4 = _pages_w_resumable_resource_exhausted_error( + _bq_to_avro_blocks(bq_blocks_4, avro_schema), delay_seconds, delay_nanos + ) + bq_blocks_5 = [[{"int_col": 859}, {"int_col": 231}], [{"int_col": 777}]] + avro_blocks_5 = _bq_to_avro_blocks(bq_blocks_5, avro_schema) mock_gapic_client.read_rows.side_effect = ( + avro_blocks_1, avro_blocks_2, avro_blocks_3, avro_blocks_4, + avro_blocks_5, ) + 
retry_delay_num_calls = 0 + retry_delay = 0 + + def retry_delay_callback(delay): + nonlocal retry_delay_num_calls + nonlocal retry_delay + retry_delay_num_calls += 1 + retry_delay = delay + reader = class_under_test( - avro_blocks_1, mock_gapic_client, "teststream", 0, {"metadata": {"test-key": "test-value"}}, + retry_delay_callback=retry_delay_callback, ) got = reader.rows() @@ -272,6 +344,7 @@ def test_rows_w_reconnect(class_under_test, mock_gapic_client): itertools.chain.from_iterable(bq_blocks_2), itertools.chain.from_iterable(bq_blocks_3), itertools.chain.from_iterable(bq_blocks_4), + itertools.chain.from_iterable(bq_blocks_5), ) ) @@ -282,9 +355,14 @@ def test_rows_w_reconnect(class_under_test, mock_gapic_client): mock_gapic_client.read_rows.assert_any_call( read_stream="teststream", offset=7, metadata={"test-key": "test-value"} ) - mock_gapic_client.read_rows.assert_called_with( + mock_gapic_client.read_rows.assert_any_call( read_stream="teststream", offset=10, metadata={"test-key": "test-value"} ) + mock_gapic_client.read_rows.assert_called_with( + read_stream="teststream", offset=13, metadata={"test-key": "test-value"} + ) + assert retry_delay_num_calls == 1 + assert retry_delay == delay_seconds + (delay_nanos / 1e9) def test_rows_w_reconnect_by_page(class_under_test, mock_gapic_client): @@ -298,14 +376,13 @@ def test_rows_w_reconnect_by_page(class_under_test, mock_gapic_client): bq_blocks_2 = [[{"int_col": 567}, {"int_col": 789}], [{"int_col": 890}]] avro_blocks_2 = _bq_to_avro_blocks(bq_blocks_2, avro_schema) - mock_gapic_client.read_rows.return_value = avro_blocks_2 + mock_gapic_client.read_rows.side_effect = ( + _pages_w_unavailable(avro_blocks_1), + avro_blocks_2, + ) reader = class_under_test( - _pages_w_unavailable(avro_blocks_1), - mock_gapic_client, - "teststream", - 0, - {"metadata": {"test-key": "test-value"}}, + mock_gapic_client, "teststream", 0, {"metadata": {"test-key": "test-value"}}, ) got = reader.rows() pages = iter(got.pages) @@ -341,7 +418,8 @@ def test_to_dataframe_no_pandas_raises_import_error( avro_schema = _bq_to_avro_schema(SCALAR_COLUMNS) avro_blocks = _bq_to_avro_blocks(SCALAR_BLOCKS, avro_schema) - reader = class_under_test(avro_blocks, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "", 0, {}) with pytest.raises(ImportError): reader.to_dataframe() @@ -359,7 +437,8 @@ def test_to_dataframe_no_schema_set_raises_type_error( avro_schema = _bq_to_avro_schema(SCALAR_COLUMNS) avro_blocks = _bq_to_avro_blocks(SCALAR_BLOCKS, avro_schema) avro_blocks[0].avro_schema = None - reader = class_under_test(avro_blocks, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "", 0, {}) rows = reader.rows() # Since session isn't passed in, reader doesn't know serialization type @@ -368,11 +447,12 @@ def test_to_dataframe_no_schema_set_raises_type_error( rows.to_dataframe() -def test_to_dataframe_w_scalars(class_under_test): +def test_to_dataframe_w_scalars(class_under_test, mock_gapic_client): avro_schema = _bq_to_avro_schema(SCALAR_COLUMNS) avro_blocks = _bq_to_avro_blocks(SCALAR_BLOCKS, avro_schema) - reader = class_under_test(avro_blocks, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "", 0, {}) got = reader.to_dataframe() expected = pandas.DataFrame( @@ -397,7 +477,7 @@ def test_to_dataframe_w_scalars(class_under_test): 
) -def test_to_dataframe_w_dtypes(class_under_test): +def test_to_dataframe_w_dtypes(class_under_test, mock_gapic_client): avro_schema = _bq_to_avro_schema( [ {"name": "bigfloat", "type": "float64"}, @@ -410,7 +490,8 @@ def test_to_dataframe_w_dtypes(class_under_test): ] avro_blocks = _bq_to_avro_blocks(blocks, avro_schema) - reader = class_under_test(avro_blocks, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "", 0, {}) got = reader.to_dataframe(dtypes={"lilfloat": "float16"}) expected = pandas.DataFrame( @@ -426,11 +507,12 @@ def test_to_dataframe_w_dtypes(class_under_test): ) -def test_to_dataframe_empty_w_scalars_avro(class_under_test): +def test_to_dataframe_empty_w_scalars_avro(class_under_test, mock_gapic_client): avro_schema = _bq_to_avro_schema(SCALAR_COLUMNS) read_session = _generate_avro_read_session(avro_schema) avro_blocks = _bq_to_avro_blocks([], avro_schema) - reader = class_under_test(avro_blocks, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "", 0, {}) # Read session is needed to get a schema for empty streams. got = reader.to_dataframe(read_session) @@ -458,7 +540,8 @@ def test_to_dataframe_empty_w_dtypes_avro(class_under_test, mock_gapic_client): ) read_session = _generate_avro_read_session(avro_schema) avro_blocks = _bq_to_avro_blocks([], avro_schema) - reader = class_under_test(avro_blocks, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = avro_blocks + reader = class_under_test(mock_gapic_client, "", 0, {}) # Read session is needed to get a schema for empty streams. got = reader.to_dataframe(read_session, dtypes={"lilfloat": "float16"}) @@ -490,14 +573,13 @@ def test_to_dataframe_by_page(class_under_test, mock_gapic_client): avro_blocks_1 = _bq_to_avro_blocks(bq_blocks_1, avro_schema) avro_blocks_2 = _bq_to_avro_blocks(bq_blocks_2, avro_schema) - mock_gapic_client.read_rows.return_value = avro_blocks_2 + mock_gapic_client.read_rows.side_effect = ( + _pages_w_unavailable(avro_blocks_1), + avro_blocks_2, + ) reader = class_under_test( - _pages_w_unavailable(avro_blocks_1), - mock_gapic_client, - "teststream", - 0, - {"metadata": {"test-key": "test-value"}}, + mock_gapic_client, "teststream", 0, {"metadata": {"test-key": "test-value"}}, ) got = reader.rows() pages = iter(got.pages) diff --git a/tests/unit/test_reader_v1_arrow.py b/tests/unit/test_reader_v1_arrow.py index 02c7b80a..9cecb9d2 100644 --- a/tests/unit/test_reader_v1_arrow.py +++ b/tests/unit/test_reader_v1_arrow.py @@ -131,7 +131,8 @@ def test_pyarrow_rows_raises_import_error( monkeypatch.setattr(mut, "pyarrow", None) arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) arrow_batches = _bq_to_arrow_batches(SCALAR_BLOCKS, arrow_schema) - reader = class_under_test(arrow_batches, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = arrow_batches + reader = class_under_test(mock_gapic_client, "", 0, {}) rows = iter(reader.rows()) # Since session isn't passed in, reader doesn't know serialization type @@ -146,7 +147,8 @@ def test_to_arrow_no_pyarrow_raises_import_error( monkeypatch.setattr(mut, "pyarrow", None) arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) arrow_batches = _bq_to_arrow_batches(SCALAR_BLOCKS, arrow_schema) - reader = class_under_test(arrow_batches, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = arrow_batches + reader = 
class_under_test(mock_gapic_client, "", 0, {}) with pytest.raises(ImportError): reader.to_arrow() @@ -158,10 +160,11 @@ def test_to_arrow_no_pyarrow_raises_import_error( next(reader.rows().pages).to_arrow() -def test_to_arrow_w_scalars_arrow(class_under_test): +def test_to_arrow_w_scalars_arrow(class_under_test, mock_gapic_client): arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) arrow_batches = _bq_to_arrow_batches(SCALAR_BLOCKS, arrow_schema) - reader = class_under_test(arrow_batches, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = arrow_batches + reader = class_under_test(mock_gapic_client, "", 0, {}) actual_table = reader.to_arrow() expected_table = pyarrow.Table.from_batches( _bq_to_arrow_batch_objects(SCALAR_BLOCKS, arrow_schema) @@ -169,11 +172,11 @@ def test_to_arrow_w_scalars_arrow(class_under_test): assert actual_table == expected_table -def test_to_dataframe_w_scalars_arrow(class_under_test): +def test_to_dataframe_w_scalars_arrow(class_under_test, mock_gapic_client): arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) arrow_batches = _bq_to_arrow_batches(SCALAR_BLOCKS, arrow_schema) - - reader = class_under_test(arrow_batches, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = arrow_batches + reader = class_under_test(mock_gapic_client, "", 0, {}) got = reader.to_dataframe() expected = pandas.DataFrame( @@ -187,7 +190,8 @@ def test_to_dataframe_w_scalars_arrow(class_under_test): def test_rows_w_empty_stream_arrow(class_under_test, mock_gapic_client): - reader = class_under_test([], mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = [] + reader = class_under_test(mock_gapic_client, "", 0, {}) got = reader.rows() assert tuple(got) == () @@ -195,8 +199,8 @@ def test_rows_w_empty_stream_arrow(class_under_test, mock_gapic_client): def test_rows_w_scalars_arrow(class_under_test, mock_gapic_client): arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) arrow_batches = _bq_to_arrow_batches(SCALAR_BLOCKS, arrow_schema) - - reader = class_under_test(arrow_batches, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = arrow_batches + reader = class_under_test(mock_gapic_client, "", 0, {}) got = tuple( dict((key, value.as_py()) for key, value in row_dict.items()) for row_dict in reader.rows() @@ -206,7 +210,7 @@ def test_rows_w_scalars_arrow(class_under_test, mock_gapic_client): assert got == expected -def test_to_dataframe_w_dtypes_arrow(class_under_test): +def test_to_dataframe_w_dtypes_arrow(class_under_test, mock_gapic_client): arrow_schema = _bq_to_arrow_schema( [ {"name": "bigfloat", "type": "float64"}, @@ -218,8 +222,8 @@ def test_to_dataframe_w_dtypes_arrow(class_under_test): [{"bigfloat": 3.75, "lilfloat": 11.0}], ] arrow_batches = _bq_to_arrow_batches(blocks, arrow_schema) - - reader = class_under_test(arrow_batches, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = arrow_batches + reader = class_under_test(mock_gapic_client, "", 0, {}) got = reader.to_dataframe(dtypes={"lilfloat": "float16"}) expected = pandas.DataFrame( @@ -235,11 +239,12 @@ def test_to_dataframe_w_dtypes_arrow(class_under_test): ) -def test_to_dataframe_empty_w_scalars_arrow(class_under_test): +def test_to_dataframe_empty_w_scalars_arrow(class_under_test, mock_gapic_client): arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) read_session = _generate_arrow_read_session(arrow_schema) arrow_batches = _bq_to_arrow_batches([], arrow_schema) - reader = class_under_test(arrow_batches, 
mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = arrow_batches + reader = class_under_test(mock_gapic_client, "", 0, {}) # Read session is needed to get a schema for empty streams. got = reader.to_dataframe(read_session) @@ -267,7 +272,8 @@ def test_to_dataframe_empty_w_dtypes_arrow(class_under_test, mock_gapic_client): ) read_session = _generate_arrow_read_session(arrow_schema) arrow_batches = _bq_to_arrow_batches([], arrow_schema) - reader = class_under_test(arrow_batches, mock_gapic_client, "", 0, {}) + mock_gapic_client.read_rows.return_value = arrow_batches + reader = class_under_test(mock_gapic_client, "", 0, {}) # Read session is needed to get a schema for empty streams. got = reader.to_dataframe(read_session, dtypes={"lilfloat": "float16"}) @@ -309,11 +315,12 @@ def test_to_dataframe_by_page_arrow(class_under_test, mock_gapic_client): batch_1 = _bq_to_arrow_batches(bq_blocks_1, arrow_schema) batch_2 = _bq_to_arrow_batches(bq_blocks_2, arrow_schema) - mock_gapic_client.read_rows.return_value = batch_2 - - reader = class_under_test( - _pages_w_unavailable(batch_1), mock_gapic_client, "", 0, {} + mock_gapic_client.read_rows.side_effect = ( + _pages_w_unavailable(batch_1), + batch_2, ) + + reader = class_under_test(mock_gapic_client, "", 0, {}) got = reader.rows() pages = iter(got.pages) From 81c8b8cdcc43d5ab671d7da7e8fe6c58291e1d49 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 13 Jan 2022 09:47:11 -0600 Subject: [PATCH 14/14] chore(main): release 2.11.0 (#378) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 8 ++++++++ setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0136b7c4..6e239143 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-bigquery-storage/#history +## [2.11.0](https://github.com/googleapis/python-bigquery-storage/compare/v2.10.1...v2.11.0) (2022-01-12) + + +### Features + +* add `write_mode` property to BigQuery Storage Write API v1 ([#360](https://github.com/googleapis/python-bigquery-storage/issues/360)) ([aa9740d](https://github.com/googleapis/python-bigquery-storage/commit/aa9740d352b2359171a3a99811f88e24ae927189)) +* retryable resource exhausted handling ([#366](https://github.com/googleapis/python-bigquery-storage/issues/366)) ([33757d8](https://github.com/googleapis/python-bigquery-storage/commit/33757d88c968fef65332f5ebe0b876758f978ab0)) + ### [2.10.1](https://www.github.com/googleapis/python-bigquery-storage/compare/v2.10.0...v2.10.1) (2021-11-11) diff --git a/setup.py b/setup.py index fe768452..fe6f5f89 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ name = "google-cloud-bigquery-storage" description = "BigQuery Storage API API client library" -version = "2.10.1" +version = "2.11.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # NOTE: Maintainers, please do not require google-api-core>=2.x.x
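
The retryable RESOURCE_EXHAUSTED handling introduced in #366 (patch 13/14 above) is driven entirely by the RetryInfo detail attached to the error: ReadRowsStream sleeps for max(0, retry_delay.seconds + retry_delay.nanos / 1e9) seconds, first calling retry_delay_callback with that duration if one was supplied, then reconnects at the most recent offset. Below is a minimal usage sketch of the new parameter; it is not part of the patches, and PROJECT, DATASET, and TABLE are placeholder names.

    # Sketch only: exercises the retry_delay_callback hook added in #366.
    # PROJECT, DATASET, and TABLE are placeholders, not real resources.
    from google.cloud import bigquery_storage
    from google.cloud.bigquery_storage import types

    client = bigquery_storage.BigQueryReadClient()

    requested_session = types.ReadSession(
        table="projects/PROJECT/datasets/DATASET/tables/TABLE",
        data_format=types.DataFormat.AVRO,
    )
    session = client.create_read_session(
        parent="projects/PROJECT",
        read_session=requested_session,
        max_stream_count=1,
    )

    def on_retry_delay(delay):
        # Called with the server-requested backoff (in seconds) whenever the
        # stream hits a retryable RESOURCE_EXHAUSTED error that carries a
        # RetryInfo detail; the reader then sleeps for delay and reconnects.
        print("backing off %.3fs before resuming the stream" % delay)

    reader = client.read_rows(
        session.streams[0].name, retry_delay_callback=on_retry_delay
    )
    for row in reader.rows(session):
        pass  # process each row

The unit tests in patch 13/14 fabricate the retryable error the same way the backend delivers it. A standalone sketch of that construction, and of the delay arithmetic applied by _resource_exhausted_exception_is_retryable, assuming a google-api-core release older than 2.2.0 (which still spells the details tuple as the private attribute _details):

    # Sketch only: a RESOURCE_EXHAUSTED error is retried only when a RetryInfo
    # detail is attached, mirroring _pages_w_resumable_resource_exhausted_error.
    import google.api_core.exceptions
    import google.rpc.error_details_pb2

    retry_info = google.rpc.error_details_pb2.RetryInfo()
    retry_info.retry_delay.seconds = 1
    retry_info.retry_delay.nanos = 234

    error = google.api_core.exceptions.ResourceExhausted(
        "RESOURCE_EXHAUSTED: retry later"
    )
    error._details = (retry_info,)  # google-api-core >= 2.2.0 exposes .details

    # The reader sleeps for max(0, seconds + nanos / 1e9) before reconnecting.
    delay = max(
        0.0,
        float(retry_info.retry_delay.seconds)
        + float(retry_info.retry_delay.nanos) / 1e9,
    )
    assert abs(delay - 1.000000234) < 1e-12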