From 2788736b80a4c4ac0ae3029aeb28bcefd34f2db7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 11 Jan 2021 22:51:55 +0100 Subject: [PATCH 01/11] chore(deps): update dependency google-cloud-bigquery to v2.6.2 (#466) --- samples/geography/requirements.txt | 2 +- samples/snippets/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/geography/requirements.txt b/samples/geography/requirements.txt index 3ea0e6e06..338cf2e89 100644 --- a/samples/geography/requirements.txt +++ b/samples/geography/requirements.txt @@ -1,3 +1,3 @@ geojson==2.5.0 -google-cloud-bigquery==2.6.1 +google-cloud-bigquery==2.6.2 Shapely==1.7.1 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 208eb4526..003dc6fb4 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-bigquery==2.6.1 +google-cloud-bigquery==2.6.2 google-cloud-bigquery-storage==2.1.0 google-auth-oauthlib==0.4.2 grpcio==1.34.0 From 1526e3936703fc97b17ec30c8ab8fe90197ad303 Mon Sep 17 00:00:00 2001 From: Carlos de la Guardia Date: Wed, 13 Jan 2021 09:55:33 -0600 Subject: [PATCH 02/11] feature: raise error for unknown properties in job config (#446) * feature: warn about unknown properties in job config * fix: raise error instead of warning * fix: use hasattr instead of __dict__ * fix bad merge * fix system test that sets wrong property Co-authored-by: Tim Swast --- google/cloud/bigquery/job/base.py | 8 ++++++++ tests/system.py | 6 ++---- tests/unit/job/test_base.py | 7 +++++++ 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/google/cloud/bigquery/job/base.py b/google/cloud/bigquery/job/base.py index 3c601f072..930b71e8a 100644 --- a/google/cloud/bigquery/job/base.py +++ b/google/cloud/bigquery/job/base.py @@ -659,6 +659,14 @@ def __init__(self, job_type, **kwargs): for prop, val in kwargs.items(): setattr(self, prop, val) + def __setattr__(self, name, value): + """Override to be able to raise error if an unknown property is being set""" + if not name.startswith("_") and not hasattr(type(self), name): + raise AttributeError( + "Property {} is unknown for {}.".format(name, type(self)) + ) + super(_JobConfig, self).__setattr__(name, value) + @property def labels(self): """Dict[str, str]: Labels for the job. 
diff --git a/tests/system.py b/tests/system.py index 102c8f78d..447f66b1a 100644 --- a/tests/system.py +++ b/tests/system.py @@ -76,7 +76,7 @@ from google.cloud.bigquery.dataset import DatasetReference from google.cloud.bigquery.table import Table from google.cloud._helpers import UTC -from google.cloud.bigquery import dbapi +from google.cloud.bigquery import dbapi, enums from google.cloud import storage from test_utils.retry import RetryErrors @@ -1789,10 +1789,8 @@ def test_query_w_wrong_config(self): rows = list(Config.CLIENT.query("SELECT 1;").result()) assert rows[0][0] == 1 - project = Config.CLIENT.project - dataset_ref = bigquery.DatasetReference(project, "dset") bad_config = LoadJobConfig() - bad_config.destination = dataset_ref.table("tbl") + bad_config.source_format = enums.SourceFormat.CSV with self.assertRaises(Exception): Config.CLIENT.query(good_query, job_config=bad_config).result() diff --git a/tests/unit/job/test_base.py b/tests/unit/job/test_base.py index 478e30e6f..610ad2875 100644 --- a/tests/unit/job/test_base.py +++ b/tests/unit/job/test_base.py @@ -19,6 +19,7 @@ from google.api_core import exceptions import google.api_core.retry import mock +import pytest from .helpers import _make_client from .helpers import _make_connection @@ -1021,6 +1022,12 @@ def test_ctor(self): self.assertEqual(job_config._job_type, self.JOB_TYPE) self.assertEqual(job_config._properties, {self.JOB_TYPE: {}}) + def test_ctor_with_unknown_property_raises_error(self): + error_text = "Property wrong_name is unknown for" + with pytest.raises(AttributeError, match=error_text): + config = self._make_one() + config.wrong_name = None + def test_fill_from_default(self): from google.cloud.bigquery import QueryJobConfig From d7fc252563ba0b01a7dcf4af933d72bdfc005806 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 13 Jan 2021 23:56:25 +0100 Subject: [PATCH 03/11] chore(deps): update dependency grpcio to v1.34.1 (#470) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 003dc6fb4..de882844b 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,7 +1,7 @@ google-cloud-bigquery==2.6.2 google-cloud-bigquery-storage==2.1.0 google-auth-oauthlib==0.4.2 -grpcio==1.34.0 +grpcio==1.34.1 ipython==7.16.1; python_version < '3.7' ipython==7.17.0; python_version >= '3.7' matplotlib==3.3.3 From 80944f080bcc4fda870a6daf1d884de616d39ae7 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 14 Jan 2021 16:59:26 +0100 Subject: [PATCH 04/11] fix: reading the labels attribute on Job instances (#471) --- google/cloud/bigquery/job/base.py | 7 +++---- tests/system.py | 17 +++++++++++++++++ tests/unit/job/test_base.py | 2 +- 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/google/cloud/bigquery/job/base.py b/google/cloud/bigquery/job/base.py index 930b71e8a..5ba01aa67 100644 --- a/google/cloud/bigquery/job/base.py +++ b/google/cloud/bigquery/job/base.py @@ -233,7 +233,7 @@ def path(self): @property def labels(self): """Dict[str, str]: Labels for the job.""" - return self._properties.setdefault("labels", {}) + return self._properties.setdefault("configuration", {}).setdefault("labels", {}) @property def etag(self): @@ -671,9 +671,8 @@ def __setattr__(self, name, value): def labels(self): """Dict[str, str]: Labels for the job. - This method always returns a dict. To change a job's labels, - modify the dict, then call ``Client.update_job``. 
To delete a - label, set its value to :data:`None` before updating. + This method always returns a dict. Once a job has been created on the + server, its labels cannot be modified anymore. Raises: ValueError: If ``value`` type is invalid. diff --git a/tests/system.py b/tests/system.py index 447f66b1a..0fa5bc41e 100644 --- a/tests/system.py +++ b/tests/system.py @@ -1667,6 +1667,23 @@ def test_job_cancel(self): # raise an error, and that the job completed (in the `retry()` # above). + def test_job_labels(self): + DATASET_ID = _make_dataset_id("job_cancel") + JOB_ID_PREFIX = "fetch_" + DATASET_ID + QUERY = "SELECT 1 as one" + + self.temp_dataset(DATASET_ID) + + job_config = bigquery.QueryJobConfig( + labels={"custom_label": "label_value", "another_label": "foo123"} + ) + job = Config.CLIENT.query( + QUERY, job_id_prefix=JOB_ID_PREFIX, job_config=job_config + ) + + expected_labels = {"custom_label": "label_value", "another_label": "foo123"} + self.assertEqual(job.labels, expected_labels) + def test_get_failed_job(self): # issue 4246 from google.api_core.exceptions import BadRequest diff --git a/tests/unit/job/test_base.py b/tests/unit/job/test_base.py index 610ad2875..44bbc2c77 100644 --- a/tests/unit/job/test_base.py +++ b/tests/unit/job/test_base.py @@ -251,7 +251,7 @@ def test_labels_hit(self): labels = {"foo": "bar"} client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) - job._properties["labels"] = labels + job._properties.setdefault("configuration", {})["labels"] = labels self.assertEqual(job.labels, labels) def test_etag(self): From 9f6556cd299e02016811d2e0f495928d82e03667 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 20 Jan 2021 17:04:02 -0600 Subject: [PATCH 05/11] refactor: move system tests into `tests/system` directory (#475) This aligns more closely with the templates from synthtool. Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-bigquery/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) Towards #366 --- noxfile.py | 15 ++-- tests/system/__init__.py | 13 ++++ tests/{system.py => system/test_client.py} | 81 ++------------------- tests/system/test_magics.py | 83 ++++++++++++++++++++++ 4 files changed, 111 insertions(+), 81 deletions(-) create mode 100644 tests/system/__init__.py rename tests/{system.py => system/test_client.py} (97%) create mode 100644 tests/system/test_magics.py diff --git a/noxfile.py b/noxfile.py index f3326d01b..e6a739d1e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -115,9 +115,7 @@ def system(session): session.install("ipython", "-c", constraints_path) # Run py.test against the system tests. 
- session.run( - "py.test", "--quiet", os.path.join("tests", "system.py"), *session.posargs - ) + session.run("py.test", "--quiet", os.path.join("tests", "system"), *session.posargs) @nox.session(python=["3.8"]) @@ -181,12 +179,14 @@ def prerelease_deps(session): ) session.install("--pre", "grpcio", "pandas") session.install( + "freezegun", + "google-cloud-storage", + "google-cloud-testutils", + "IPython", "mock", + "psutil", "pytest", - "google-cloud-testutils", "pytest-cov", - "freezegun", - "IPython", ) session.install("-e", ".[all]") @@ -196,7 +196,8 @@ def prerelease_deps(session): session.run("python", "-c", "import pyarrow; print(pyarrow.__version__)") # Run all tests, except a few samples tests which require extra dependencies. - session.run("py.test", "tests") + session.run("py.test", "tests/unit") + session.run("py.test", "tests/system") session.run("py.test", "samples/tests") diff --git a/tests/system/__init__.py b/tests/system/__init__.py new file mode 100644 index 000000000..4fbd93bb2 --- /dev/null +++ b/tests/system/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/system.py b/tests/system/test_client.py similarity index 97% rename from tests/system.py rename to tests/system/test_client.py index 0fa5bc41e..aa1a03160 100644 --- a/tests/system.py +++ b/tests/system/test_client.py @@ -22,13 +22,12 @@ import json import operator import os +import pathlib import time import unittest import uuid -import re import psutil -import pytest import pytz import pkg_resources @@ -51,13 +50,6 @@ import pyarrow.types except ImportError: # pragma: NO COVER pyarrow = None -try: - import IPython - from IPython.utils import io as ipython_io - from IPython.testing import tools - from IPython.terminal import interactiveshell -except ImportError: # pragma: NO COVER - IPython = None from google.api_core.exceptions import PreconditionFailed from google.api_core.exceptions import BadRequest @@ -86,7 +78,7 @@ JOB_TIMEOUT = 120 # 2 minutes -WHERE = os.path.abspath(os.path.dirname(__file__)) +DATA_PATH = pathlib.Path(__file__).parent.parent / "data" # Common table data used for many tests. 
ROWS = [ @@ -149,10 +141,10 @@ def _make_dataset_id(prefix): return "%s%s" % (prefix, unique_resource_id()) -def _load_json_schema(filename="data/schema.json"): +def _load_json_schema(filename="schema.json"): from google.cloud.bigquery.table import _parse_schema_resource - json_filename = os.path.join(WHERE, filename) + json_filename = DATA_PATH / filename with open(json_filename, "r") as schema_file: return _parse_schema_resource(json.load(schema_file)) @@ -716,7 +708,7 @@ def test_load_table_from_local_avro_file_then_dump_table(self): table = Table(table_ref) self.to_delete.insert(0, table) - with open(os.path.join(WHERE, "data", "colors.avro"), "rb") as avrof: + with open(DATA_PATH / "colors.avro", "rb") as avrof: config = bigquery.LoadJobConfig() config.source_format = SourceFormat.AVRO config.write_disposition = WriteDisposition.WRITE_TRUNCATE @@ -1347,7 +1339,7 @@ def test_load_avro_from_uri_then_dump_table(self): ("orange", 590), ("red", 650), ] - with open(os.path.join(WHERE, "data", "colors.avro"), "rb") as f: + with open(DATA_PATH / "colors.avro", "rb") as f: GS_URL = self._write_avro_to_storage( "bq_load_test" + unique_resource_id(), "colors.avro", f ) @@ -2707,7 +2699,7 @@ def test_create_table_rows_fetch_nested_schema(self): to_insert = [] # Data is in "JSON Lines" format, see http://jsonlines.org/ - json_filename = os.path.join(WHERE, "data", "characters.jsonl") + json_filename = DATA_PATH / "characters.jsonl" with open(json_filename) as rows_file: for line in rows_file: to_insert.append(json.loads(line)) @@ -2979,47 +2971,6 @@ def temp_dataset(self, dataset_id, location=None): return dataset -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") -@pytest.mark.skipif(IPython is None, reason="Requires `ipython`") -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic(): - ip = IPython.get_ipython() - current_process = psutil.Process() - conn_count_start = len(current_process.connections()) - - ip.extension_manager.load_extension("google.cloud.bigquery") - sql = """ - SELECT - CONCAT( - 'https://stackoverflow.com/questions/', - CAST(id as STRING)) as url, - view_count - FROM `bigquery-public-data.stackoverflow.posts_questions` - WHERE tags like '%google-bigquery%' - ORDER BY view_count DESC - LIMIT 10 - """ - with ipython_io.capture_output() as captured: - result = ip.run_cell_magic("bigquery", "--use_rest_api", sql) - - conn_count_end = len(current_process.connections()) - - lines = re.split("\n|\r", captured.stdout) - # Removes blanks & terminal code (result of display clearing) - updates = list(filter(lambda x: bool(x) and x != "\x1b[2K", lines)) - assert re.match("Executing query with job ID: .*", updates[0]) - assert all(re.match("Query executing: .*s", line) for line in updates[1:-1]) - assert re.match("Query complete after .*s", updates[-1]) - assert isinstance(result, pandas.DataFrame) - assert len(result) == 10 # verify row count - assert list(result) == ["url", "view_count"] # verify column names - - # NOTE: For some reason, the number of open sockets is sometimes one *less* - # than expected when running system tests on Kokoro, thus using the <= assertion. - # That's still fine, however, since the sockets are apparently not leaked. 
- assert conn_count_end <= conn_count_start # system resources are released - - def _job_done(instance): return instance.state.lower() == "done" @@ -3039,21 +2990,3 @@ def _table_exists(t): return True except NotFound: return False - - -@pytest.fixture(scope="session") -def ipython(): - config = tools.default_config() - config.TerminalInteractiveShell.simple_prompt = True - shell = interactiveshell.TerminalInteractiveShell.instance(config=config) - return shell - - -@pytest.fixture() -def ipython_interactive(request, ipython): - """Activate IPython's builtin hooks - - for the duration of the test scope. - """ - with ipython.builtin_trap: - yield ipython diff --git a/tests/system/test_magics.py b/tests/system/test_magics.py new file mode 100644 index 000000000..78c15cb50 --- /dev/null +++ b/tests/system/test_magics.py @@ -0,0 +1,83 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""System tests for Jupyter/IPython connector.""" + +import re + +import pytest +import psutil + + +IPython = pytest.importorskip("IPython") +io = pytest.importorskip("IPython.utils.io") +pandas = pytest.importorskip("pandas") +tools = pytest.importorskip("IPython.testing.tools") +interactiveshell = pytest.importorskip("IPython.terminal.interactiveshell") + + +@pytest.fixture(scope="session") +def ipython(): + config = tools.default_config() + config.TerminalInteractiveShell.simple_prompt = True + shell = interactiveshell.TerminalInteractiveShell.instance(config=config) + return shell + + +@pytest.fixture() +def ipython_interactive(ipython): + """Activate IPython's builtin hooks + + for the duration of the test scope. 
+ """ + with ipython.builtin_trap: + yield ipython + + +def test_bigquery_magic(ipython_interactive): + ip = IPython.get_ipython() + current_process = psutil.Process() + conn_count_start = len(current_process.connections()) + + ip.extension_manager.load_extension("google.cloud.bigquery") + sql = """ + SELECT + CONCAT( + 'https://stackoverflow.com/questions/', + CAST(id as STRING)) as url, + view_count + FROM `bigquery-public-data.stackoverflow.posts_questions` + WHERE tags like '%google-bigquery%' + ORDER BY view_count DESC + LIMIT 10 + """ + with io.capture_output() as captured: + result = ip.run_cell_magic("bigquery", "--use_rest_api", sql) + + conn_count_end = len(current_process.connections()) + + lines = re.split("\n|\r", captured.stdout) + # Removes blanks & terminal code (result of display clearing) + updates = list(filter(lambda x: bool(x) and x != "\x1b[2K", lines)) + assert re.match("Executing query with job ID: .*", updates[0]) + assert all(re.match("Query executing: .*s", line) for line in updates[1:-1]) + assert re.match("Query complete after .*s", updates[-1]) + assert isinstance(result, pandas.DataFrame) + assert len(result) == 10 # verify row count + assert list(result) == ["url", "view_count"] # verify column names + + # NOTE: For some reason, the number of open sockets is sometimes one *less* + # than expected when running system tests on Kokoro, thus using the <= assertion. + # That's still fine, however, since the sockets are apparently not leaked. + assert conn_count_end <= conn_count_start # system resources are released From cac90626c5e7f2b4db2b50926e4bf483d088826f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 22 Jan 2021 18:46:27 +0100 Subject: [PATCH 06/11] chore(deps): update dependency grpcio to v1.35.0 (#477) @tswast confirmed this is ok to merge and that broken docs-presubmit is unrelateed --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index de882844b..603d49a72 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,7 +1,7 @@ google-cloud-bigquery==2.6.2 google-cloud-bigquery-storage==2.1.0 google-auth-oauthlib==0.4.2 -grpcio==1.34.1 +grpcio==1.35.0 ipython==7.16.1; python_version < '3.7' ipython==7.17.0; python_version >= '3.7' matplotlib==3.3.3 From 530e1e8d8fe8939e914a78ff1b220907c1b87af7 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 22 Jan 2021 23:35:12 +0100 Subject: [PATCH 07/11] fix: use explicitly given project over the client's default project for load jobs (#482) * fix: use project parameter if given for load jobs * blacken client tests * Refactor string concatenations in client tests * Silence invalid coverage complaint --- google/cloud/bigquery/client.py | 41 +++++++-- tests/unit/test_client.py | 157 +++++++++++++++++++++++--------- 2 files changed, 148 insertions(+), 50 deletions(-) diff --git a/google/cloud/bigquery/client.py b/google/cloud/bigquery/client.py index 3541726b8..b270075a9 100644 --- a/google/cloud/bigquery/client.py +++ b/google/cloud/bigquery/client.py @@ -2136,11 +2136,11 @@ def load_table_from_file( try: if size is None or size >= _MAX_MULTIPART_SIZE: response = self._do_resumable_upload( - file_obj, job_resource, num_retries, timeout + file_obj, job_resource, num_retries, timeout, project=project ) else: response = self._do_multipart_upload( - file_obj, job_resource, size, num_retries, timeout + 
file_obj, job_resource, size, num_retries, timeout, project=project ) except resumable_media.InvalidResponse as exc: raise exceptions.from_http_response(exc.response) @@ -2475,7 +2475,9 @@ def load_table_from_json( timeout=timeout, ) - def _do_resumable_upload(self, stream, metadata, num_retries, timeout): + def _do_resumable_upload( + self, stream, metadata, num_retries, timeout, project=None + ): """Perform a resumable upload. Args: @@ -2491,13 +2493,17 @@ def _do_resumable_upload(self, stream, metadata, num_retries, timeout): The number of seconds to wait for the underlying HTTP transport before using ``retry``. + project (Optional[str]): + Project ID of the project of where to run the upload. Defaults + to the client's project. + Returns: requests.Response: The "200 OK" response object returned after the final chunk is uploaded. """ upload, transport = self._initiate_resumable_upload( - stream, metadata, num_retries, timeout + stream, metadata, num_retries, timeout, project=project ) while not upload.finished: @@ -2505,7 +2511,9 @@ def _do_resumable_upload(self, stream, metadata, num_retries, timeout): return response - def _initiate_resumable_upload(self, stream, metadata, num_retries, timeout): + def _initiate_resumable_upload( + self, stream, metadata, num_retries, timeout, project=None + ): """Initiate a resumable upload. Args: @@ -2521,6 +2529,10 @@ def _initiate_resumable_upload(self, stream, metadata, num_retries, timeout): The number of seconds to wait for the underlying HTTP transport before using ``retry``. + project (Optional[str]): + Project ID of the project of where to run the upload. Defaults + to the client's project. + Returns: Tuple: Pair of @@ -2532,7 +2544,11 @@ def _initiate_resumable_upload(self, stream, metadata, num_retries, timeout): chunk_size = _DEFAULT_CHUNKSIZE transport = self._http headers = _get_upload_headers(self._connection.user_agent) - upload_url = _RESUMABLE_URL_TEMPLATE.format(project=self.project) + + if project is None: + project = self.project + upload_url = _RESUMABLE_URL_TEMPLATE.format(project=project) + # TODO: modify ResumableUpload to take a retry.Retry object # that it can use for the initial RPC. upload = ResumableUpload(upload_url, chunk_size, headers=headers) @@ -2553,7 +2569,9 @@ def _initiate_resumable_upload(self, stream, metadata, num_retries, timeout): return upload, transport - def _do_multipart_upload(self, stream, metadata, size, num_retries, timeout): + def _do_multipart_upload( + self, stream, metadata, size, num_retries, timeout, project=None + ): """Perform a multipart upload. Args: @@ -2574,6 +2592,10 @@ def _do_multipart_upload(self, stream, metadata, size, num_retries, timeout): The number of seconds to wait for the underlying HTTP transport before using ``retry``. + project (Optional[str]): + Project ID of the project of where to run the upload. Defaults + to the client's project. 
+ Returns: requests.Response: The "200 OK" response object returned after the multipart @@ -2591,7 +2613,10 @@ def _do_multipart_upload(self, stream, metadata, size, num_retries, timeout): headers = _get_upload_headers(self._connection.user_agent) - upload_url = _MULTIPART_URL_TEMPLATE.format(project=self.project) + if project is None: + project = self.project + + upload_url = _MULTIPART_URL_TEMPLATE.format(project=project) upload = MultipartUpload(upload_url, headers=headers) if num_retries is not None: diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index bf183b5a4..625256e6e 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -4455,9 +4455,8 @@ def _initiate_resumable_upload_helper(self, num_retries=None): # Check the returned values. self.assertIsInstance(upload, ResumableUpload) upload_url = ( - "https://bigquery.googleapis.com/upload/bigquery/v2/projects/" - + self.PROJECT - + "/jobs?uploadType=resumable" + f"https://bigquery.googleapis.com/upload/bigquery/v2/projects/{self.PROJECT}" + "/jobs?uploadType=resumable" ) self.assertEqual(upload.upload_url, upload_url) expected_headers = _get_upload_headers(conn.user_agent) @@ -4498,7 +4497,9 @@ def test__initiate_resumable_upload(self): def test__initiate_resumable_upload_with_retry(self): self._initiate_resumable_upload_helper(num_retries=11) - def _do_multipart_upload_success_helper(self, get_boundary, num_retries=None): + def _do_multipart_upload_success_helper( + self, get_boundary, num_retries=None, project=None + ): from google.cloud.bigquery.client import _get_upload_headers from google.cloud.bigquery.job import LoadJob from google.cloud.bigquery.job import LoadJobConfig @@ -4508,6 +4509,9 @@ def _do_multipart_upload_success_helper(self, get_boundary, num_retries=None): client = self._make_one(project=self.PROJECT, _http=fake_transport) conn = client._connection = make_connection() + if project is None: + project = self.PROJECT + # Create some mock arguments. data = b"Bzzzz-zap \x00\x01\xf4" stream = io.BytesIO(data) @@ -4516,8 +4520,9 @@ def _do_multipart_upload_success_helper(self, get_boundary, num_retries=None): job = LoadJob(None, None, self.TABLE_REF, client, job_config=config) metadata = job.to_api_repr() size = len(data) + response = client._do_multipart_upload( - stream, metadata, size, num_retries, None + stream, metadata, size, num_retries, None, project=project ) # Check the mocks and the returned value. 
@@ -4526,35 +4531,39 @@ def _do_multipart_upload_success_helper(self, get_boundary, num_retries=None): get_boundary.assert_called_once_with() upload_url = ( - "https://bigquery.googleapis.com/upload/bigquery/v2/projects/" - + self.PROJECT - + "/jobs?uploadType=multipart" + f"https://bigquery.googleapis.com/upload/bigquery/v2/projects/{project}" + "/jobs?uploadType=multipart" ) payload = ( b"--==0==\r\n" - + b"content-type: application/json; charset=UTF-8\r\n\r\n" - + json.dumps(metadata).encode("utf-8") - + b"\r\n" - + b"--==0==\r\n" - + b"content-type: */*\r\n\r\n" - + data - + b"\r\n" - + b"--==0==--" - ) + b"content-type: application/json; charset=UTF-8\r\n\r\n" + b"%(json_metadata)s" + b"\r\n" + b"--==0==\r\n" + b"content-type: */*\r\n\r\n" + b"%(data)s" + b"\r\n" + b"--==0==--" + ) % {b"json_metadata": json.dumps(metadata).encode("utf-8"), b"data": data} + headers = _get_upload_headers(conn.user_agent) headers["content-type"] = b'multipart/related; boundary="==0=="' fake_transport.request.assert_called_once_with( "POST", upload_url, data=payload, headers=headers, timeout=mock.ANY ) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload(self, get_boundary): self._do_multipart_upload_success_helper(get_boundary) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_retry(self, get_boundary): self._do_multipart_upload_success_helper(get_boundary, num_retries=8) + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==0==") + def test__do_multipart_upload_with_custom_project(self, get_boundary): + self._do_multipart_upload_success_helper(get_boundary, project="custom-project") + def test_copy_table(self): from google.cloud.bigquery.job import CopyJob @@ -6364,10 +6373,10 @@ def test_insert_rows_from_dataframe(self): dataframe = pandas.DataFrame( [ - {"name": u"Little One", "age": 10, "adult": False}, - {"name": u"Young Gun", "age": 20, "adult": True}, - {"name": u"Dad", "age": 30, "adult": True}, - {"name": u"Stranger", "age": 40, "adult": True}, + {"name": "Little One", "age": 10, "adult": False}, + {"name": "Young Gun", "age": 20, "adult": True}, + {"name": "Dad", "age": 30, "adult": True}, + {"name": "Stranger", "age": 40, "adult": True}, ] ) @@ -6560,8 +6569,8 @@ def test_insert_rows_from_dataframe_w_explicit_none_insert_ids(self): dataframe = pandas.DataFrame( [ - {"name": u"Little One", "adult": False}, - {"name": u"Young Gun", "adult": True}, + {"name": "Little One", "adult": False}, + {"name": "Young Gun", "adult": True}, ] ) @@ -7230,17 +7239,18 @@ class TestClientUpload(object): # `pytest`-style tests rather than `unittest`-style. 
from google.cloud.bigquery.job import SourceFormat - TABLE_REF = DatasetReference("project_id", "test_dataset").table("test_table") + PROJECT = "project_id" + TABLE_REF = DatasetReference(PROJECT, "test_dataset").table("test_table") LOCATION = "us-central" - @staticmethod - def _make_client(transport=None, location=None): + @classmethod + def _make_client(cls, transport=None, location=None): from google.cloud.bigquery import _http from google.cloud.bigquery import client cl = client.Client( - project="project_id", + project=cls.PROJECT, credentials=_make_credentials(), _http=transport, location=location, @@ -7274,12 +7284,12 @@ def _make_do_upload_patch(cls, client, method, resource={}, side_effect=None): return mock.patch.object(client, method, side_effect=side_effect, autospec=True) EXPECTED_CONFIGURATION = { - "jobReference": {"projectId": "project_id", "jobId": "job_id"}, + "jobReference": {"projectId": PROJECT, "jobId": "job_id"}, "configuration": { "load": { "sourceFormat": SourceFormat.CSV, "destinationTable": { - "projectId": "project_id", + "projectId": PROJECT, "datasetId": "test_dataset", "tableId": "test_table", }, @@ -7325,7 +7335,11 @@ def test_load_table_from_file_resumable(self): ) do_upload.assert_called_once_with( - file_obj, self.EXPECTED_CONFIGURATION, _DEFAULT_NUM_RETRIES, None + file_obj, + self.EXPECTED_CONFIGURATION, + _DEFAULT_NUM_RETRIES, + None, + project=self.EXPECTED_CONFIGURATION["jobReference"]["projectId"], ) # the original config object should not have been modified @@ -7354,7 +7368,11 @@ def test_load_table_from_file_w_explicit_project(self): expected_resource["jobReference"]["location"] = self.LOCATION expected_resource["jobReference"]["projectId"] = "other-project" do_upload.assert_called_once_with( - file_obj, expected_resource, _DEFAULT_NUM_RETRIES, None + file_obj, + expected_resource, + _DEFAULT_NUM_RETRIES, + None, + project="other-project", ) def test_load_table_from_file_w_client_location(self): @@ -7384,7 +7402,11 @@ def test_load_table_from_file_w_client_location(self): expected_resource["jobReference"]["location"] = self.LOCATION expected_resource["jobReference"]["projectId"] = "other-project" do_upload.assert_called_once_with( - file_obj, expected_resource, _DEFAULT_NUM_RETRIES, None + file_obj, + expected_resource, + _DEFAULT_NUM_RETRIES, + None, + project="other-project", ) def test_load_table_from_file_resumable_metadata(self): @@ -7409,7 +7431,7 @@ def test_load_table_from_file_resumable_metadata(self): config.null_marker = r"\N" expected_config = { - "jobReference": {"projectId": "project_id", "jobId": "job_id"}, + "jobReference": {"projectId": self.PROJECT, "jobId": "job_id"}, "configuration": { "load": { "destinationTable": { @@ -7442,7 +7464,11 @@ def test_load_table_from_file_resumable_metadata(self): ) do_upload.assert_called_once_with( - file_obj, expected_config, _DEFAULT_NUM_RETRIES, None + file_obj, + expected_config, + _DEFAULT_NUM_RETRIES, + None, + project=self.EXPECTED_CONFIGURATION["jobReference"]["projectId"], ) def test_load_table_from_file_multipart(self): @@ -7471,6 +7497,7 @@ def test_load_table_from_file_multipart(self): file_obj_size, _DEFAULT_NUM_RETRIES, None, + project=self.PROJECT, ) def test_load_table_from_file_with_retries(self): @@ -7491,7 +7518,11 @@ def test_load_table_from_file_with_retries(self): ) do_upload.assert_called_once_with( - file_obj, self.EXPECTED_CONFIGURATION, num_retries, None + file_obj, + self.EXPECTED_CONFIGURATION, + num_retries, + None, + 
project=self.EXPECTED_CONFIGURATION["jobReference"]["projectId"], ) def test_load_table_from_file_with_rewind(self): @@ -7524,7 +7555,11 @@ def test_load_table_from_file_with_readable_gzip(self): ) do_upload.assert_called_once_with( - gzip_file, self.EXPECTED_CONFIGURATION, _DEFAULT_NUM_RETRIES, None + gzip_file, + self.EXPECTED_CONFIGURATION, + _DEFAULT_NUM_RETRIES, + None, + project=self.EXPECTED_CONFIGURATION["jobReference"]["projectId"], ) def test_load_table_from_file_with_writable_gzip(self): @@ -8169,7 +8204,7 @@ def test_load_table_from_dataframe_w_partial_schema(self): dtype="datetime64[ns]", ).dt.tz_localize(pytz.utc), ), - ("string_col", [u"abc", None, u"def"]), + ("string_col", ["abc", None, "def"]), ("bytes_col", [b"abc", b"def", None]), ] ) @@ -8228,7 +8263,7 @@ def test_load_table_from_dataframe_w_partial_schema_extra_types(self): [ ("int_col", [1, 2, 3]), ("int_as_float_col", [1.0, float("nan"), 3.0]), - ("string_col", [u"abc", None, u"def"]), + ("string_col", ["abc", None, "def"]), ] ) dataframe = pandas.DataFrame(df_data, columns=df_data.keys()) @@ -8263,7 +8298,7 @@ def test_load_table_from_dataframe_w_partial_schema_missing_types(self): client = self._make_client() df_data = collections.OrderedDict( [ - ("string_col", [u"abc", u"def", u"ghi"]), + ("string_col", ["abc", "def", "ghi"]), ("unknown_col", [b"jkl", None, b"mno"]), ] ) @@ -8317,7 +8352,7 @@ def test_load_table_from_dataframe_w_schema_arrow_custom_compression(self): from google.cloud.bigquery.schema import SchemaField client = self._make_client() - records = [{"name": u"Monty", "age": 100}, {"name": u"Python", "age": 60}] + records = [{"name": "Monty", "age": 100}, {"name": "Python", "age": 60}] dataframe = pandas.DataFrame(records) schema = (SchemaField("name", "STRING"), SchemaField("age", "INTEGER")) job_config = job.LoadJobConfig(schema=schema) @@ -8658,6 +8693,44 @@ def test__do_resumable_upload(self): timeout=mock.ANY, ) + def test__do_resumable_upload_custom_project(self): + file_obj = self._make_file_obj() + file_obj_len = len(file_obj.getvalue()) + transport = self._make_transport( + self._make_resumable_upload_responses(file_obj_len) + ) + client = self._make_client(transport) + + result = client._do_resumable_upload( + file_obj, self.EXPECTED_CONFIGURATION, None, None, project="custom-project", + ) + + content = result.content.decode("utf-8") + assert json.loads(content) == {"size": file_obj_len} + + # Verify that configuration data was passed in with the initial + # request. + transport.request.assert_any_call( + "POST", + mock.ANY, + data=json.dumps(self.EXPECTED_CONFIGURATION).encode("utf-8"), + headers=mock.ANY, + timeout=mock.ANY, + ) + + # Check the project ID used in the call to initiate resumable upload. 
+ initiation_url = next( + ( + call.args[1] + for call in transport.request.call_args_list + if call.args[0] == "POST" and "uploadType=resumable" in call.args[1] + ), + None, + ) # pragma: NO COVER + + assert initiation_url is not None + assert "projects/custom-project" in initiation_url + def test__do_multipart_upload(self): transport = self._make_transport([self._make_response(http.client.OK)]) client = self._make_client(transport) From 61b438523d305ce66a68fde7cb49e9abbf0a8d1d Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 25 Jan 2021 18:04:04 +0100 Subject: [PATCH 08/11] fix: invalid conversion of timezone-aware datetime values to JSON (#480) * fix: correctly convert timezone-aware datetimes * blacken * Remove python-dateutil test dependency * Remove unused dst() methods --- google/cloud/bigquery/_helpers.py | 8 ++++ tests/unit/test__helpers.py | 77 +++++++++++++++++-------------- 2 files changed, 51 insertions(+), 34 deletions(-) diff --git a/google/cloud/bigquery/_helpers.py b/google/cloud/bigquery/_helpers.py index 6b66a3020..daa14b92a 100644 --- a/google/cloud/bigquery/_helpers.py +++ b/google/cloud/bigquery/_helpers.py @@ -315,6 +315,10 @@ def _timestamp_to_json_parameter(value): def _timestamp_to_json_row(value): """Coerce 'value' to an JSON-compatible representation.""" if isinstance(value, datetime.datetime): + # For naive datetime objects UTC timezone is assumed, thus we format + # those to string directly without conversion. + if value.tzinfo is not None: + value = value.astimezone(UTC) value = value.strftime(_RFC3339_MICROS) return value @@ -322,6 +326,10 @@ def _timestamp_to_json_row(value): def _datetime_to_json(value): """Coerce 'value' to an JSON-compatible representation.""" if isinstance(value, datetime.datetime): + # For naive datetime objects UTC timezone is assumed, thus we format + # those to string directly without conversion. 
+ if value.tzinfo is not None: + value = value.astimezone(UTC) value = value.strftime(_RFC3339_MICROS_NO_ZULU) return value diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index 8948d4152..0fdf1142f 100644 --- a/tests/unit/test__helpers.py +++ b/tests/unit/test__helpers.py @@ -420,13 +420,13 @@ def _call_fut(self, row, schema): def test_w_single_scalar_column(self): # SELECT 1 AS col col = _Field("REQUIRED", "col", "INTEGER") - row = {u"f": [{u"v": u"1"}]} + row = {"f": [{"v": "1"}]} self.assertEqual(self._call_fut(row, schema=[col]), (1,)) def test_w_single_scalar_geography_column(self): # SELECT 1 AS col col = _Field("REQUIRED", "geo", "GEOGRAPHY") - row = {u"f": [{u"v": u"POINT(1, 2)"}]} + row = {"f": [{"v": "POINT(1, 2)"}]} self.assertEqual(self._call_fut(row, schema=[col]), ("POINT(1, 2)",)) def test_w_single_struct_column(self): @@ -434,13 +434,13 @@ def test_w_single_struct_column(self): sub_1 = _Field("REQUIRED", "sub_1", "INTEGER") sub_2 = _Field("REQUIRED", "sub_2", "INTEGER") col = _Field("REQUIRED", "col", "RECORD", fields=[sub_1, sub_2]) - row = {u"f": [{u"v": {u"f": [{u"v": u"1"}, {u"v": u"2"}]}}]} + row = {"f": [{"v": {"f": [{"v": "1"}, {"v": "2"}]}}]} self.assertEqual(self._call_fut(row, schema=[col]), ({"sub_1": 1, "sub_2": 2},)) def test_w_single_array_column(self): # SELECT [1, 2, 3] as col col = _Field("REPEATED", "col", "INTEGER") - row = {u"f": [{u"v": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}]} + row = {"f": [{"v": [{"v": "1"}, {"v": "2"}, {"v": "3"}]}]} self.assertEqual(self._call_fut(row, schema=[col]), ([1, 2, 3],)) def test_w_struct_w_nested_array_column(self): @@ -450,13 +450,13 @@ def test_w_struct_w_nested_array_column(self): third = _Field("REPEATED", "third", "INTEGER") col = _Field("REQUIRED", "col", "RECORD", fields=[first, second, third]) row = { - u"f": [ + "f": [ { - u"v": { - u"f": [ - {u"v": [{u"v": u"1"}, {u"v": u"2"}]}, - {u"v": u"3"}, - {u"v": [{u"v": u"4"}, {u"v": u"5"}]}, + "v": { + "f": [ + {"v": [{"v": "1"}, {"v": "2"}]}, + {"v": "3"}, + {"v": [{"v": "4"}, {"v": "5"}]}, ] } } @@ -464,7 +464,7 @@ def test_w_struct_w_nested_array_column(self): } self.assertEqual( self._call_fut(row, schema=[col]), - ({u"first": [1, 2], u"second": 3, u"third": [4, 5]},), + ({"first": [1, 2], "second": 3, "third": [4, 5]},), ) def test_w_array_of_struct(self): @@ -474,11 +474,11 @@ def test_w_array_of_struct(self): third = _Field("REQUIRED", "third", "INTEGER") col = _Field("REPEATED", "col", "RECORD", fields=[first, second, third]) row = { - u"f": [ + "f": [ { - u"v": [ - {u"v": {u"f": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}}, - {u"v": {u"f": [{u"v": u"4"}, {u"v": u"5"}, {u"v": u"6"}]}}, + "v": [ + {"v": {"f": [{"v": "1"}, {"v": "2"}, {"v": "3"}]}}, + {"v": {"f": [{"v": "4"}, {"v": "5"}, {"v": "6"}]}}, ] } ] @@ -487,8 +487,8 @@ def test_w_array_of_struct(self): self._call_fut(row, schema=[col]), ( [ - {u"first": 1, u"second": 2, u"third": 3}, - {u"first": 4, u"second": 5, u"third": 6}, + {"first": 1, "second": 2, "third": 3}, + {"first": 4, "second": 5, "third": 6}, ], ), ) @@ -499,32 +499,25 @@ def test_w_array_of_struct_w_array(self): second = _Field("REQUIRED", "second", "INTEGER") col = _Field("REPEATED", "col", "RECORD", fields=[first, second]) row = { - u"f": [ + "f": [ { - u"v": [ - { - u"v": { - u"f": [ - {u"v": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}, - {u"v": u"4"}, - ] - } - }, + "v": [ { - u"v": { - u"f": [ - {u"v": [{u"v": u"5"}, {u"v": u"6"}]}, - {u"v": u"7"}, + "v": { + "f": [ + {"v": [{"v": "1"}, {"v": "2"}, {"v": 
"3"}]}, + {"v": "4"}, ] } }, + {"v": {"f": [{"v": [{"v": "5"}, {"v": "6"}]}, {"v": "7"}]}}, ] } ] } self.assertEqual( self._call_fut(row, schema=[col]), - ([{u"first": [1, 2, 3], u"second": 4}, {u"first": [5, 6], u"second": 7}],), + ([{"first": [1, 2, 3], "second": 4}, {"first": [5, 6], "second": 7}],), ) @@ -673,7 +666,7 @@ def test_w_non_bytes(self): def test_w_bytes(self): source = b"source" - expected = u"c291cmNl" + expected = "c291cmNl" converted = self._call_fut(source) self.assertEqual(converted, expected) @@ -726,7 +719,7 @@ def test_w_string(self): ZULU = "2016-12-20 15:58:27.339328+00:00" self.assertEqual(self._call_fut(ZULU), ZULU) - def test_w_datetime(self): + def test_w_datetime_no_zone(self): when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328) self.assertEqual(self._call_fut(when), "2016-12-20T15:58:27.339328Z") @@ -736,6 +729,14 @@ def test_w_datetime_w_utc_zone(self): when = datetime.datetime(2020, 11, 17, 1, 6, 52, 353795, tzinfo=UTC) self.assertEqual(self._call_fut(when), "2020-11-17T01:06:52.353795Z") + def test_w_datetime_w_non_utc_zone(self): + class EstZone(datetime.tzinfo): + def utcoffset(self, _): + return datetime.timedelta(minutes=-300) + + when = datetime.datetime(2020, 11, 17, 1, 6, 52, 353795, tzinfo=EstZone()) + self.assertEqual(self._call_fut(when), "2020-11-17T06:06:52.353795Z") + class Test_datetime_to_json(unittest.TestCase): def _call_fut(self, value): @@ -753,6 +754,14 @@ def test_w_datetime(self): when = datetime.datetime(2016, 12, 3, 14, 11, 27, 123456, tzinfo=UTC) self.assertEqual(self._call_fut(when), "2016-12-03T14:11:27.123456") + def test_w_datetime_w_non_utc_zone(self): + class EstZone(datetime.tzinfo): + def utcoffset(self, _): + return datetime.timedelta(minutes=-300) + + when = datetime.datetime(2016, 12, 3, 14, 11, 27, 123456, tzinfo=EstZone()) + self.assertEqual(self._call_fut(when), "2016-12-03T19:11:27.123456") + class Test_date_to_json(unittest.TestCase): def _call_fut(self, value): From f187fb01440fc17f7f1694f9b559bb5b2fbae1d0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 27 Jan 2021 21:44:48 +0100 Subject: [PATCH 09/11] chore(deps): update dependency google-cloud-bigquery-storage to v2.2.1 (#485) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 603d49a72..599b6d52f 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,5 +1,5 @@ google-cloud-bigquery==2.6.2 -google-cloud-bigquery-storage==2.1.0 +google-cloud-bigquery-storage==2.2.1 google-auth-oauthlib==0.4.2 grpcio==1.35.0 ipython==7.16.1; python_version < '3.7' From 55daa7da9857a8a2fb14a80a4efa3f466386a85f Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 27 Jan 2021 21:54:02 +0100 Subject: [PATCH 10/11] deps: declare support for Python 3.9 (#488) Towards #462. With wheels for `pyarrow==3.0.0` released we can now officially support Python 3.9 - well, at least after when we add Python 3.9 checks to Kokoro. PR checklist: - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-bigquery/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) --- README.rst | 2 +- noxfile.py | 22 +++++++++++++--------- setup.py | 11 ++++------- testing/constraints-3.6.txt | 20 +++++--------------- 4 files changed, 23 insertions(+), 32 deletions(-) diff --git a/README.rst b/README.rst index 61192b625..8454cf9c0 100644 --- a/README.rst +++ b/README.rst @@ -52,7 +52,7 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.6, < 3.9 +Python >= 3.6, < 3.10 Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/noxfile.py b/noxfile.py index e6a739d1e..942525ca9 100644 --- a/noxfile.py +++ b/noxfile.py @@ -23,6 +23,10 @@ BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ("docs", "google", "samples", "tests", "noxfile.py", "setup.py") + +DEFAULT_PYTHON_VERSION = "3.8" +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() # 'docfx' is excluded since it only needs to run in 'docs-presubmit' @@ -80,13 +84,13 @@ def default(session): ) -@nox.session(python=["3.6", "3.7", "3.8"]) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=["3.8"]) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" @@ -118,7 +122,7 @@ def system(session): session.run("py.test", "--quiet", os.path.join("tests", "system"), *session.posargs) -@nox.session(python=["3.8"]) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def snippets(session): """Run the snippets test suite.""" @@ -154,7 +158,7 @@ def snippets(session): ) -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. @@ -166,7 +170,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.8") +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def prerelease_deps(session): """Run all tests with prerelease versions of dependencies installed. @@ -201,7 +205,7 @@ def prerelease_deps(session): session.run("py.test", "samples/tests") -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. @@ -218,7 +222,7 @@ def lint(session): session.run("black", "--check", *BLACK_PATHS) -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" @@ -239,7 +243,7 @@ def blacken(session): session.run("black", *BLACK_PATHS) -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs.""" @@ -262,7 +266,7 @@ def docs(session): ) -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docfx(session): """Build the docfx yaml files for this library.""" diff --git a/setup.py b/setup.py index 0ea6ccca2..ea2df4843 100644 --- a/setup.py +++ b/setup.py @@ -45,13 +45,9 @@ # grpc.Channel.close() method isn't added until 1.32.0. # https://github.com/grpc/grpc/pull/15254 "grpcio >= 1.32.0, < 2.0dev", - "pyarrow >= 1.0.0, < 3.0dev", - ], - "pandas": [ - "pandas>=0.23.0", - # pyarrow 1.0.0 is required for the use of timestamp_as_object keyword. 
- "pyarrow >= 1.0.0, < 3.0dev", + "pyarrow >= 1.0.0, < 4.0dev", ], + "pandas": ["pandas>=0.23.0", "pyarrow >= 1.0.0, < 4.0dev",], "tqdm": ["tqdm >= 4.7.4, <5.0.0dev"], "opentelemetry": [ "opentelemetry-api==0.11b0", @@ -112,6 +108,7 @@ "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -120,7 +117,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=3.6, <3.9", + python_requires=">=3.6, <3.10", include_package_data=True, zip_safe=False, ) diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index fe2bcfda7..c4a5c51be 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -6,26 +6,16 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 google-api-core==1.23.0 -proto-plus==1.10.0 +google-cloud-bigquery-storage==2.0.0 google-cloud-core==1.4.1 google-resumable-media==0.6.0 -six==1.13.0 -protobuf==3.12.0 -google-cloud-bigquery-storage==2.0.0 grpcio==1.32.0 -pyarrow==1.0.0 -pandas==0.23.0 -pyarrow==1.0.0 -tqdm==4.7.4 opentelemetry-api==0.11b0 -opentelemetry-sdk==0.11b0 opentelemetry-instrumentation==0.11b0 -google-cloud-bigquery-storage==2.0.0 -grpcio==1.32.0 -pyarrow==1.0.0 -opentelemetry-api==0.11b0 opentelemetry-sdk==0.11b0 -opentelemetry-instrumentation==0.11b0 pandas==0.23.0 +proto-plus==1.10.0 +protobuf==3.12.0 pyarrow==1.0.0 -tqdm==4.7.4 \ No newline at end of file +six==1.13.0 +tqdm==4.7.4 From d5735ea378eaeea8c0ecbed89daf02f5102c6a63 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 27 Jan 2021 21:08:02 +0000 Subject: [PATCH 11/11] chore: release 2.7.0 (#472) :robot: I have created a release \*beep\* \*boop\* --- ## [2.7.0](https://www.github.com/googleapis/python-bigquery/compare/v2.6.2...v2.7.0) (2021-01-27) ### Bug Fixes * invalid conversion of timezone-aware datetime values to JSON ([#480](https://www.github.com/googleapis/python-bigquery/issues/480)) ([61b4385](https://www.github.com/googleapis/python-bigquery/commit/61b438523d305ce66a68fde7cb49e9abbf0a8d1d)) * reading the labels attribute on Job instances ([#471](https://www.github.com/googleapis/python-bigquery/issues/471)) ([80944f0](https://www.github.com/googleapis/python-bigquery/commit/80944f080bcc4fda870a6daf1d884de616d39ae7)) * use explicitly given project over the client's default project for load jobs ([#482](https://www.github.com/googleapis/python-bigquery/issues/482)) ([530e1e8](https://www.github.com/googleapis/python-bigquery/commit/530e1e8d8fe8939e914a78ff1b220907c1b87af7)) ### Dependencies * declare support for Python 3.9 ([#488](https://www.github.com/googleapis/python-bigquery/issues/488)) ([55daa7d](https://www.github.com/googleapis/python-bigquery/commit/55daa7da9857a8a2fb14a80a4efa3f466386a85f)) --- This PR was generated with [Release 
Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- CHANGELOG.md | 14 ++++++++++++++ google/cloud/bigquery/version.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4d58072e8..a58510c66 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-bigquery/#history +## [2.7.0](https://www.github.com/googleapis/python-bigquery/compare/v2.6.2...v2.7.0) (2021-01-27) + + +### Bug Fixes + +* invalid conversion of timezone-aware datetime values to JSON ([#480](https://www.github.com/googleapis/python-bigquery/issues/480)) ([61b4385](https://www.github.com/googleapis/python-bigquery/commit/61b438523d305ce66a68fde7cb49e9abbf0a8d1d)) +* reading the labels attribute on Job instances ([#471](https://www.github.com/googleapis/python-bigquery/issues/471)) ([80944f0](https://www.github.com/googleapis/python-bigquery/commit/80944f080bcc4fda870a6daf1d884de616d39ae7)) +* use explicitly given project over the client's default project for load jobs ([#482](https://www.github.com/googleapis/python-bigquery/issues/482)) ([530e1e8](https://www.github.com/googleapis/python-bigquery/commit/530e1e8d8fe8939e914a78ff1b220907c1b87af7)) + + +### Dependencies + +* declare support for Python 3.9 ([#488](https://www.github.com/googleapis/python-bigquery/issues/488)) ([55daa7d](https://www.github.com/googleapis/python-bigquery/commit/55daa7da9857a8a2fb14a80a4efa3f466386a85f)) + ### [2.6.2](https://www.github.com/googleapis/python-bigquery/compare/v2.6.1...v2.6.2) (2021-01-11) diff --git a/google/cloud/bigquery/version.py b/google/cloud/bigquery/version.py index 9aaeb8bc4..d962613e0 100644 --- a/google/cloud/bigquery/version.py +++ b/google/cloud/bigquery/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.6.2" +__version__ = "2.7.0"
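
Taken together, patches 02, 04, 07, and 08 above change user-visible behavior. Below is a minimal illustrative sketch of two of those changes — it is not part of the patch series, assumes the series above is applied (roughly google-cloud-bigquery 2.7.0), and needs no credentials or network access.

```python
# Illustrative sketch only, assuming the patch series above is applied
# (roughly google-cloud-bigquery 2.7.0); no credentials or network needed.
import datetime

from google.cloud import bigquery

# Patch 02: setting an unknown property on a job config now raises
# AttributeError instead of being silently accepted.
config = bigquery.QueryJobConfig()
try:
    # Hypothetical typo for the real `destination` property.
    config.destinationTable = "proj.dset.tbl"
except AttributeError as exc:
    print(exc)  # Property destinationTable is unknown for <class '...QueryJobConfig'>.

# Patch 08: timezone-aware datetime values are converted to UTC before JSON
# serialization; naive datetimes are still assumed to already be in UTC.
eastern = datetime.timezone(datetime.timedelta(hours=-5))
aware = datetime.datetime(2020, 11, 17, 1, 6, 52, tzinfo=eastern)
print(aware.astimezone(datetime.timezone.utc).isoformat())  # 2020-11-17T06:06:52+00:00
```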