Commit d7f0531

Make method signatures compatible again
The annotations caused a mismatch
Parent: 996baa3
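
For context, the fix standardizes on string (forward-reference) annotations for the optional bigquery_storage dependency, so the import can live under typing.TYPE_CHECKING and the paired to_dataframe/to_arrow signatures in query.py and table.py stay textually identical. A minimal sketch of the pattern, assuming Python 3 with mypy or pytype; the function body is illustrative only, not the library's code:

import typing
from typing import Optional

if typing.TYPE_CHECKING:  # pragma: NO COVER
    # Seen only by type checkers, never executed at runtime, so the
    # optional google-cloud-bigquery-storage package need not be installed.
    from google.cloud import bigquery_storage


def to_dataframe(
    # The quoted annotation defers name resolution to the type checker.
    # An unquoted bigquery_storage.BigQueryReadClient here would raise
    # NameError at import time, because the module is never imported at runtime.
    bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
):
    ...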

2 files changed (+8 -7)

google/cloud/bigquery/job/query.py (+2 -2)

@@ -1342,7 +1342,7 @@ def result(
     def to_arrow(
         self,
         progress_bar_type: str = None,
-        bqstorage_client: "bigquery_storage.BigQueryReadClient" = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         create_bqstorage_client: bool = True,
         max_results: Optional[int] = None,
     ) -> "pyarrow.Table":

@@ -1412,7 +1412,7 @@ def to_arrow(
     # that should only exist here in the QueryJob method.
     def to_dataframe(
         self,
-        bqstorage_client: "bigquery_storage.BigQueryReadClient" = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
         progress_bar_type: str = None,
         create_bqstorage_client: bool = True,
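
Beyond the forward reference, wrapping the type in Optional[...] matters on its own: type checkers that disallow implicit Optional (mypy's --no-implicit-optional behavior) reject a None default on a parameter annotated with a bare type. A toy sketch with a hypothetical Client class standing in for BigQueryReadClient:

from typing import Optional


class Client:  # hypothetical stand-in for bigquery_storage.BigQueryReadClient
    ...


def before(client: "Client" = None):  # flagged: None is not a Client
    ...


def after(client: Optional["Client"] = None):  # accepted: None is allowed explicitly
    ...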

google/cloud/bigquery/table.py (+6 -5)

@@ -36,7 +36,6 @@
 from google.api_core.page_iterator import HTTPIterator

 import google.cloud._helpers
-from google.cloud import bigquery_storage
 from google.cloud.bigquery import _helpers
 from google.cloud.bigquery import _pandas_helpers
 from google.cloud.bigquery.exceptions import LegacyBigQueryStorageError

@@ -48,6 +47,8 @@
 from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration

 if typing.TYPE_CHECKING:  # pragma: NO COVER
+    from google.cloud import bigquery_storage
+
     # Unconditionally import optional dependencies again to tell pytype that
     # they are not None, avoiding false "no attribute" errors.
     import pandas

@@ -1628,7 +1629,7 @@ def _to_arrow_iterable(self, bqstorage_client=None):
     def to_arrow(
         self,
         progress_bar_type: str = None,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         create_bqstorage_client: bool = True,
     ) -> "pyarrow.Table":
         """[Beta] Create a class:`pyarrow.Table` by loading all pages of a

@@ -1723,7 +1724,7 @@ def to_arrow(

     def to_dataframe_iterable(
         self,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
         max_queue_size: int = _pandas_helpers._MAX_QUEUE_SIZE_DEFAULT,
     ) -> "pandas.DataFrame":

@@ -1797,7 +1798,7 @@ def to_dataframe_iterable(
     # changes to job.QueryJob.to_dataframe()
     def to_dataframe(
         self,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
         progress_bar_type: str = None,
         create_bqstorage_client: bool = True,

@@ -1978,7 +1979,7 @@ def to_dataframe(

     def to_dataframe_iterable(
         self,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Optional[Dict[str, Any]] = None,
         max_queue_size: Optional[int] = None,
     ) -> Iterator["pandas.DataFrame"]:
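
Because the module-level from google.cloud import bigquery_storage is gone from table.py, any code path that actually constructs a client at runtime has to import it lazily; the annotations reference it only as strings. A sketch of that runtime side, using a hypothetical _make_bqstorage_client helper (not the module's actual code, which resolves the client elsewhere):

import typing

if typing.TYPE_CHECKING:  # pragma: NO COVER
    from google.cloud import bigquery_storage


def _make_bqstorage_client() -> "bigquery_storage.BigQueryReadClient":
    # Deferred import: the optional dependency is loaded only when this
    # fast path is actually used, so a missing package fails here with an
    # ImportError rather than at `import google.cloud.bigquery`.
    from google.cloud import bigquery_storage

    return bigquery_storage.BigQueryReadClient()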
