
Commit 5fe515c

docs: fix typos (#1709)

1 parent 186872d
File tree: 3 files changed, +7 −7 lines

google/cloud/aiplatform/datasets/dataset.py

Lines changed: 1 addition & 1 deletion

@@ -577,7 +577,7 @@ def export_data(self, output_dir: str) -> Sequence[str]:
         """
         self.wait()
 
-        # TODO(b/171311614): Add support for BiqQuery export path
+        # TODO(b/171311614): Add support for BigQuery export path
         export_data_config = gca_dataset.ExportDataConfig(
             gcs_destination=gca_io.GcsDestination(output_uri_prefix=output_dir)
        )
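
For context, until the TODO above lands, export_data only writes to a Cloud Storage destination. A minimal usage sketch, assuming an existing image dataset (the project, location, bucket, and dataset ID below are hypothetical placeholders):

    from google.cloud import aiplatform

    aiplatform.init(project="my-project", location="us-central1")

    # export_data currently accepts only a GCS output directory; BigQuery
    # export is not yet supported (see the TODO in the diff above).
    dataset = aiplatform.ImageDataset("1234567890")
    exported_files = dataset.export_data(output_dir="gs://my-bucket/exports")
    print(exported_files)  # list of GCS URIs produced by the export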

google/cloud/aiplatform/featurestore/entity_type.py

Lines changed: 2 additions & 2 deletions

@@ -833,10 +833,10 @@ def _validate_and_get_import_feature_values_request(
                 - A single Feature timestamp for all entities
                   being imported. The timestamp must not have
                   higher than millisecond precision.
-            data_source (Union[gca_io.AvroSource, gca_io.BiqQuerySource, gca_io.CsvSource]):
+            data_source (Union[gca_io.AvroSource, gca_io.BigQuerySource, gca_io.CsvSource]):
                 Required. The data_source can be one of:
                 - AvroSource
-                - BiqQuerySource
+                - BigQuerySource
                 - CsvSource
             feature_source_fields (Dict[str, str]):
                 Optional. User defined dictionary to map ID of the Feature for importing values
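
A minimal sketch of constructing the BigQuery-backed data_source named in this docstring (the table URI is a hypothetical placeholder):

    from google.cloud.aiplatform_v1.types import BigQuerySource

    # BigQuerySource is one of the three accepted data_source types; its
    # input_uri takes a "bq://project.dataset.table" reference.
    data_source = BigQuerySource(
        input_uri="bq://my-project.my_dataset.feature_values"
    )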

google/cloud/aiplatform/training_jobs.py

Lines changed: 4 additions & 4 deletions
@@ -3012,7 +3012,7 @@ def run(
                 Private services access must already be configured for the network.
                 If left unspecified, the job is not peered with any network.
             bigquery_destination (str):
-                Provide this field if `dataset` is a BiqQuery dataset.
+                Provide this field if `dataset` is a BigQuery dataset.
                 The BigQuery project location where the training data is to
                 be written to. In the given project a new dataset is created
                 with name
@@ -3328,7 +3328,7 @@ def _run(
                 Private services access must already be configured for the network.
                 If left unspecified, the job is not peered with any network.
             bigquery_destination (str):
-                Provide this field if `dataset` is a BiqQuery dataset.
+                Provide this field if `dataset` is a BigQuery dataset.
                 The BigQuery project location where the training data is to
                 be written to. In the given project a new dataset is created
                 with name
@@ -3921,7 +3921,7 @@ def run(
                 Private services access must already be configured for the network.
                 If left unspecified, the job is not peered with any network.
             bigquery_destination (str):
-                Provide this field if `dataset` is a BiqQuery dataset.
+                Provide this field if `dataset` is a BigQuery dataset.
                 The BigQuery project location where the training data is to
                 be written to. In the given project a new dataset is created
                 with name
@@ -6348,7 +6348,7 @@ def run(
                 Private services access must already be configured for the network.
                 If left unspecified, the job is not peered with any network.
             bigquery_destination (str):
-                Provide this field if `dataset` is a BiqQuery dataset.
+                Provide this field if `dataset` is a BigQuery dataset.
                 The BigQuery project location where the training data is to
                 be written to. In the given project a new dataset is created
                 with name
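
A minimal usage sketch of the parameter these docstrings describe, assuming a BigQuery-backed tabular dataset (the display name, script path, container URI, dataset ID, and project are hypothetical placeholders):

    from google.cloud import aiplatform

    job = aiplatform.CustomTrainingJob(
        display_name="my-training-job",
        script_path="train.py",
        container_uri="us-docker.pkg.dev/vertex-ai/training/tf-cpu.2-8:latest",
    )

    # Because `dataset` is a BigQuery dataset, bigquery_destination names the
    # project (as a bq:// URI) where the training-data splits are written.
    job.run(
        dataset=aiplatform.TabularDataset("my-tabular-dataset-id"),
        bigquery_destination="bq://my-project",
    )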
