
Commit 6c1ab80

fix: type annotations include Optional when None is accepted (#1554)
Fixes #1545 🦕
1 parent a5d86a3 commit 6c1ab80
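For context, this commit targets the "implicit Optional" pattern that PEP 484 discourages and that recent mypy releases reject by default. A minimal sketch of the before/after, using hypothetical function names rather than code from this repository:

from typing import Optional

# Before: the default is None, but the annotation claims only str is accepted.
# With implicit Optional disabled (mypy's default in recent releases), this is
# reported as an "Incompatible default for argument" error.
def get_thing(name: str, project: str = None) -> str:
    ...

# After: the annotation states explicitly that None is an accepted value.
def get_thing_fixed(name: str, project: Optional[str] = None) -> str:
    ...

The diffs below apply the same change mechanically across the public Client surface and the job classes; runtime behavior is unchanged.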

8 files changed (+86, -73 lines)

google/cloud/bigquery/_job_helpers.py

+2-2
@@ -64,7 +64,7 @@ def query_jobs_insert(
     job_config: Optional[job.QueryJobConfig],
     job_id: Optional[str],
     job_id_prefix: Optional[str],
-    location: str,
+    location: Optional[str],
     project: str,
     retry: retries.Retry,
     timeout: Optional[float],
@@ -215,7 +215,7 @@ def query_jobs_query(
     client: "Client",
     query: str,
     job_config: Optional[job.QueryJobConfig],
-    location: str,
+    location: Optional[str],
     project: str,
     retry: retries.Retry,
     timeout: Optional[float],

google/cloud/bigquery/client.py

+55-53
@@ -307,7 +307,7 @@ def close(self):

     def get_service_account_email(
         self,
-        project: str = None,
+        project: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> str:
@@ -355,7 +355,7 @@ def get_service_account_email(
     def list_projects(
         self,
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         page_size: Optional[int] = None,
@@ -417,11 +417,11 @@ def api_request(*args, **kwargs):

     def list_datasets(
         self,
-        project: str = None,
+        project: Optional[str] = None,
         include_all: bool = False,
-        filter: str = None,
+        filter: Optional[str] = None,
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         page_size: Optional[int] = None,
@@ -498,7 +498,9 @@ def api_request(*args, **kwargs):
             page_size=page_size,
         )

-    def dataset(self, dataset_id: str, project: str = None) -> DatasetReference:
+    def dataset(
+        self, dataset_id: str, project: Optional[str] = None
+    ) -> DatasetReference:
         """Deprecated: Construct a reference to a dataset.

         .. deprecated:: 1.24.0
@@ -890,7 +892,7 @@ def set_iam_policy(
         self,
         table: Union[Table, TableReference, TableListItem, str],
         policy: Policy,
-        updateMask: str = None,
+        updateMask: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> Policy:
@@ -1350,7 +1352,7 @@ def list_models(
         self,
         dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         page_size: Optional[int] = None,
@@ -1427,7 +1429,7 @@ def list_routines(
         self,
         dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         page_size: Optional[int] = None,
@@ -1504,7 +1506,7 @@ def list_tables(
         self,
         dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         page_size: Optional[int] = None,
@@ -1862,9 +1864,9 @@ def _get_query_results(
         self,
         job_id: str,
         retry: retries.Retry,
-        project: str = None,
+        project: Optional[str] = None,
         timeout_ms: Optional[int] = None,
-        location: str = None,
+        location: Optional[str] = None,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> _QueryResults:
         """Get the query results object for a query job.
@@ -2039,8 +2041,8 @@ def create_job(
     def get_job(
         self,
         job_id: Union[str, job.LoadJob, job.CopyJob, job.ExtractJob, job.QueryJob],
-        project: str = None,
-        location: str = None,
+        project: Optional[str] = None,
+        location: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> Union[job.LoadJob, job.CopyJob, job.ExtractJob, job.QueryJob, job.UnknownJob]:
@@ -2103,8 +2105,8 @@ def get_job(
     def cancel_job(
         self,
         job_id: str,
-        project: str = None,
-        location: str = None,
+        project: Optional[str] = None,
+        location: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> Union[job.LoadJob, job.CopyJob, job.ExtractJob, job.QueryJob]:
@@ -2181,12 +2183,12 @@ def cancel_job(

     def list_jobs(
         self,
-        project: str = None,
+        project: Optional[str] = None,
         parent_job: Optional[Union[QueryJob, str]] = None,
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         all_users: bool = None,
-        state_filter: str = None,
+        state_filter: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         min_creation_time: datetime.datetime = None,
@@ -2297,11 +2299,11 @@ def load_table_from_uri(
         self,
         source_uris: Union[str, Sequence[str]],
         destination: Union[Table, TableReference, TableListItem, str],
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
-        job_config: LoadJobConfig = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
+        job_config: Optional[LoadJobConfig] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> job.LoadJob:
@@ -2386,11 +2388,11 @@ def load_table_from_file(
         rewind: bool = False,
         size: Optional[int] = None,
         num_retries: int = _DEFAULT_NUM_RETRIES,
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
-        job_config: LoadJobConfig = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
+        job_config: Optional[LoadJobConfig] = None,
         timeout: ResumableTimeoutType = DEFAULT_TIMEOUT,
     ) -> job.LoadJob:
         """Upload the contents of this table from a file-like object.
@@ -2494,11 +2496,11 @@ def load_table_from_dataframe(
         dataframe: "pandas.DataFrame",
         destination: Union[Table, TableReference, str],
         num_retries: int = _DEFAULT_NUM_RETRIES,
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
-        job_config: LoadJobConfig = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
+        job_config: Optional[LoadJobConfig] = None,
         parquet_compression: str = "snappy",
         timeout: ResumableTimeoutType = DEFAULT_TIMEOUT,
     ) -> job.LoadJob:
@@ -2751,11 +2753,11 @@ def load_table_from_json(
         json_rows: Iterable[Dict[str, Any]],
         destination: Union[Table, TableReference, TableListItem, str],
         num_retries: int = _DEFAULT_NUM_RETRIES,
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
-        job_config: LoadJobConfig = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
+        job_config: Optional[LoadJobConfig] = None,
         timeout: ResumableTimeoutType = DEFAULT_TIMEOUT,
     ) -> job.LoadJob:
         """Upload the contents of a table from a JSON string or dict.
@@ -3064,10 +3066,10 @@ def copy_table(
             Sequence[Union[Table, TableReference, TableListItem, str]],
         ],
         destination: Union[Table, TableReference, TableListItem, str],
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
         job_config: CopyJobConfig = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
@@ -3170,10 +3172,10 @@ def extract_table(
         self,
         source: Union[Table, TableReference, TableListItem, Model, ModelReference, str],
         destination_uris: Union[str, Sequence[str]],
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
         job_config: ExtractJobConfig = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
@@ -3270,10 +3272,10 @@ def query(
         self,
         query: str,
         job_config: QueryJobConfig = None,
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         job_retry: retries.Retry = DEFAULT_JOB_RETRY,
@@ -3563,7 +3565,7 @@ def insert_rows_json(
         ] = AutoRowIDs.GENERATE_UUID,
         skip_invalid_rows: bool = None,
         ignore_unknown_values: bool = None,
-        template_suffix: str = None,
+        template_suffix: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> Sequence[dict]:
@@ -3755,7 +3757,7 @@ def list_rows(
         table: Union[Table, TableListItem, TableReference, str],
         selected_fields: Sequence[SchemaField] = None,
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         start_index: Optional[int] = None,
         page_size: Optional[int] = None,
         retry: retries.Retry = DEFAULT_RETRY,
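Taken together, the client.py hunks mean callers that omit these arguments or pass None now match the declared types. A short usage sketch; the query text and the idea of relying on environment defaults are illustrative, while the method and parameter names come from the signatures above:

from google.cloud import bigquery

client = bigquery.Client()  # project and location resolved from the environment

# Omitting project/location is exactly what Optional[...] = None describes.
job = client.query("SELECT 1")
fetched = client.get_job(job.job_id)

# Passing None explicitly is likewise consistent with the new annotations.
datasets = list(client.list_datasets(project=None, filter=None))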

google/cloud/bigquery/dataset.py

+1-1
@@ -139,7 +139,7 @@ def from_api_repr(cls, resource: dict) -> "DatasetReference":

     @classmethod
     def from_string(
-        cls, dataset_id: str, default_project: str = None
+        cls, dataset_id: str, default_project: Optional[str] = None
     ) -> "DatasetReference":
         """Construct a dataset reference from dataset ID string.

google/cloud/bigquery/job/base.py

+16-5
@@ -703,7 +703,10 @@ def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None):
         self._set_properties(api_response)

     def exists(
-        self, client=None, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+        self,
+        client=None,
+        retry: "retries.Retry" = DEFAULT_RETRY,
+        timeout: Optional[float] = None,
     ) -> bool:
         """API call: test for the existence of the job via a GET request

@@ -748,7 +751,10 @@ def exists(
         return True

     def reload(
-        self, client=None, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+        self,
+        client=None,
+        retry: "retries.Retry" = DEFAULT_RETRY,
+        timeout: Optional[float] = None,
     ):
         """API call: refresh job properties via a GET request.

@@ -785,7 +791,10 @@ def reload(
         self._set_properties(api_response)

     def cancel(
-        self, client=None, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+        self,
+        client=None,
+        retry: "retries.Retry" = DEFAULT_RETRY,
+        timeout: Optional[float] = None,
     ) -> bool:
         """API call: cancel job via a POST request

@@ -855,7 +864,7 @@ def _set_future_result(self):
     def done(
         self,
         retry: "retries.Retry" = DEFAULT_RETRY,
-        timeout: float = None,
+        timeout: Optional[float] = None,
         reload: bool = True,
     ) -> bool:
         """Checks if the job is complete.
@@ -881,7 +890,9 @@ def done(
         return self.state == _DONE_STATE

     def result(  # type: ignore  # (signature complaint)
-        self, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+        self,
+        retry: "retries.Retry" = DEFAULT_RETRY,
+        timeout: Optional[float] = None,
     ) -> "_AsyncJob":
         """Start the job and wait for it to complete and get the result.

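The job/base.py hunks also reflow the one-line signatures while annotating timeout as Optional[float]; None was already the default at runtime, so only the declared type changes. An illustrative call pattern, assuming "job" is any job instance such as a QueryJob returned by Client.query:

# Assumed setup: job = client.query("SELECT 1")
if job.exists():                       # timeout defaults to None
    job.reload(timeout=30.0)           # an explicit float is still accepted
    finished = job.done(timeout=None)  # None now matches the annotation
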
google/cloud/bigquery/job/query.py

+5-5
@@ -1317,7 +1317,7 @@ def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None):
             raise

     def _reload_query_results(
-        self, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+        self, retry: "retries.Retry" = DEFAULT_RETRY, timeout: Optional[float] = None
     ):
         """Refresh the cached query results.

@@ -1405,7 +1405,7 @@ def result( # type: ignore # (complaints about the overloaded signature)
         page_size: Optional[int] = None,
         max_results: Optional[int] = None,
         retry: "retries.Retry" = DEFAULT_RETRY,
-        timeout: float = None,
+        timeout: Optional[float] = None,
         start_index: Optional[int] = None,
         job_retry: "retries.Retry" = DEFAULT_JOB_RETRY,
     ) -> Union["RowIterator", _EmptyRowIterator]:
@@ -1557,7 +1557,7 @@ def do_get_result():
     # that should only exist here in the QueryJob method.
     def to_arrow(
         self,
-        progress_bar_type: str = None,
+        progress_bar_type: Optional[str] = None,
         bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         create_bqstorage_client: bool = True,
         max_results: Optional[int] = None,
@@ -1634,7 +1634,7 @@ def to_dataframe(
         self,
         bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
-        progress_bar_type: str = None,
+        progress_bar_type: Optional[str] = None,
         create_bqstorage_client: bool = True,
         max_results: Optional[int] = None,
         geography_as_object: bool = False,
@@ -1820,7 +1820,7 @@ def to_geodataframe(
         self,
         bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
-        progress_bar_type: str = None,
+        progress_bar_type: Optional[str] = None,
         create_bqstorage_client: bool = True,
         max_results: Optional[int] = None,
         geography_column: Optional[str] = None,

google/cloud/bigquery/routine/routine.py

+1-1
@@ -537,7 +537,7 @@ def from_api_repr(cls, resource: dict) -> "RoutineReference":

     @classmethod
     def from_string(
-        cls, routine_id: str, default_project: str = None
+        cls, routine_id: str, default_project: Optional[str] = None
     ) -> "RoutineReference":
         """Factory: construct a routine reference from routine ID string.
