|
@@ -36,7 +36,6 @@
 from google.api_core.page_iterator import HTTPIterator
 
 import google.cloud._helpers
-from google.cloud import bigquery_storage
 from google.cloud.bigquery import _helpers
 from google.cloud.bigquery import _pandas_helpers
 from google.cloud.bigquery.exceptions import LegacyBigQueryStorageError
@@ -48,6 +47,8 @@
 from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration
 
 if typing.TYPE_CHECKING:  # pragma: NO COVER
+    from google.cloud import bigquery_storage
+
     # Unconditionally import optional dependencies again to tell pytype that
     # they are not None, avoiding false "no attribute" errors.
     import pandas
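The two hunks above move the `google.cloud.bigquery_storage` import behind `typing.TYPE_CHECKING`, so importing the module no longer requires the optional BigQuery Storage extra. A minimal sketch of that pattern, with an illustrative function name (`make_reader` is not part of this change):

```python
import typing
from typing import Optional

if typing.TYPE_CHECKING:  # pragma: NO COVER
    # Only static analyzers (mypy, pytype) execute this branch; at runtime the
    # package may be absent without breaking the import of this module.
    from google.cloud import bigquery_storage


def make_reader(
    bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
) -> bool:
    # The quoted annotation is never evaluated by the interpreter, so
    # bigquery_storage is only needed if a caller actually passes a client.
    return bqstorage_client is not None
```

Type checkers still resolve the real class because they do execute the `TYPE_CHECKING` branch; the interpreter never does.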
|
@@ -1628,7 +1629,7 @@ def _to_arrow_iterable(self, bqstorage_client=None):
     def to_arrow(
         self,
         progress_bar_type: str = None,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         create_bqstorage_client: bool = True,
     ) -> "pyarrow.Table":
         """[Beta] Create a class:`pyarrow.Table` by loading all pages of a
@@ -1723,7 +1724,7 @@ def to_arrow(
 
     def to_dataframe_iterable(
         self,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
         max_queue_size: int = _pandas_helpers._MAX_QUEUE_SIZE_DEFAULT,
     ) -> "pandas.DataFrame":
@@ -1797,7 +1798,7 @@ def to_dataframe_iterable(
     # changes to job.QueryJob.to_dataframe()
     def to_dataframe(
         self,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
         progress_bar_type: str = None,
         create_bqstorage_client: bool = True,
@@ -1978,7 +1979,7 @@ def to_dataframe(
 
     def to_dataframe_iterable(
         self,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
        dtypes: Optional[Dict[str, Any]] = None,
         max_queue_size: Optional[int] = None,
     ) -> Iterator["pandas.DataFrame"]:
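In the method signatures, the annotation is quoted (`Optional["bigquery_storage.BigQueryReadClient"]`) so Python stores it as an unevaluated forward reference instead of looking up the now-absent runtime import. A hypothetical standalone stub (not the real `RowIterator` method) showing that behavior:

```python
from typing import Optional


def to_arrow_stub(
    bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
):
    """Stub with the same quoted annotation; the body is intentionally empty."""


# The string is kept as a ForwardRef, so defining and calling the function
# works even when google-cloud-bigquery-storage is not installed:
print(to_arrow_stub.__annotations__["bqstorage_client"])
# typing.Optional[ForwardRef('bigquery_storage.BigQueryReadClient')]

# Only explicit resolution, e.g. typing.get_type_hints(to_arrow_stub), needs
# bigquery_storage to be importable and raises NameError here without it.
```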
|
|