Skip to content

Commit c2496a1

Browse files
authored
fix: updates a number of optional dependencies (#1864)
This fix updates a number of optional dependencies. We use a different module import process (pytest.importorskip instead of unittest.skipif). This first major commit gets the ball rolling; a few additional commits will follow to cover the remaining files. Fixes #1864 🦕
1 parent 08b1e6f commit c2496a1

File tree

9 files changed

+311
-405
lines changed

9 files changed

+311
-405
lines changed

google/cloud/bigquery/_tqdm_helpers.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ def get_progress_bar(progress_bar_type, description, total, unit):
6767
)
6868
elif progress_bar_type == "tqdm_gui":
6969
return tqdm.tqdm_gui(desc=description, total=total, unit=unit)
70-
except (KeyError, TypeError):
70+
except (KeyError, TypeError): # pragma: NO COVER
7171
# Protect ourselves from any tqdm errors. In case of
7272
# unexpected tqdm behavior, just fall back to showing
7373
# no progress bar.

google/cloud/bigquery/client.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -593,7 +593,7 @@ def _ensure_bqstorage_client(
593593
)
594594
return None
595595

596-
if bqstorage_client is None:
596+
if bqstorage_client is None: # pragma: NO COVER
597597
bqstorage_client = bigquery_storage.BigQueryReadClient(
598598
credentials=self._credentials,
599599
client_options=client_options,

setup.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -45,8 +45,9 @@
4545
]
4646
pyarrow_dependency = "pyarrow >= 3.0.0"
4747
extras = {
48-
# Keep the no-op bqstorage extra for backward compatibility.
49-
# See: https://github.com/googleapis/python-bigquery/issues/757
48+
# bqstorage had a period where it was a required dependency, and has been
49+
# moved back to optional due to bloat. See
50+
# https://github.com/googleapis/python-bigquery/issues/1196 for more background.
5051
"bqstorage": [
5152
"google-cloud-bigquery-storage >= 2.6.0, <3.0.0dev",
5253
# Due to an issue in pip's dependency resolver, the `grpc` extra is not

tests/system/test_client.py

+7-21
Original file line numberDiff line numberDiff line change
@@ -54,16 +54,6 @@
5454

5555
from . import helpers
5656

57-
try:
58-
from google.cloud import bigquery_storage
59-
except ImportError: # pragma: NO COVER
60-
bigquery_storage = None
61-
62-
try:
63-
import pyarrow
64-
import pyarrow.types
65-
except ImportError: # pragma: NO COVER
66-
pyarrow = None
6757

6858
JOB_TIMEOUT = 120 # 2 minutes
6959
DATA_PATH = pathlib.Path(__file__).parent.parent / "data"
@@ -1772,11 +1762,10 @@ def test_dbapi_fetchall_from_script(self):
17721762
row_tuples = [r.values() for r in rows]
17731763
self.assertEqual(row_tuples, [(5, "foo"), (6, "bar"), (7, "baz")])
17741764

1775-
@unittest.skipIf(
1776-
bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
1777-
)
1778-
@unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
17791765
def test_dbapi_fetch_w_bqstorage_client_large_result_set(self):
1766+
bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
1767+
pytest.importorskip("pyarrow")
1768+
17801769
bqstorage_client = bigquery_storage.BigQueryReadClient(
17811770
credentials=Config.CLIENT._credentials
17821771
)
@@ -1834,10 +1823,8 @@ def test_dbapi_dry_run_query(self):
18341823

18351824
self.assertEqual(list(rows), [])
18361825

1837-
@unittest.skipIf(
1838-
bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
1839-
)
18401826
def test_dbapi_connection_does_not_leak_sockets(self):
1827+
pytest.importorskip("google.cloud.bigquery_storage")
18411828
current_process = psutil.Process()
18421829
conn_count_start = len(current_process.connections())
18431830

@@ -2382,11 +2369,10 @@ def test_create_table_rows_fetch_nested_schema(self):
23822369
self.assertEqual(found[7], e_favtime)
23832370
self.assertEqual(found[8], decimal.Decimal(expected["FavoriteNumber"]))
23842371

2385-
@unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
2386-
@unittest.skipIf(
2387-
bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
2388-
)
23892372
def test_nested_table_to_arrow(self):
2373+
bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
2374+
pyarrow = pytest.importorskip("pyarrow")
2375+
pyarrow.types = pytest.importorskip("pyarrow.types")
23902376
from google.cloud.bigquery.job import SourceFormat
23912377
from google.cloud.bigquery.job import WriteDisposition
23922378

0 commit comments

Comments
 (0)