diff --git a/google/cloud/aiplatform/datasets/_datasources.py b/google/cloud/aiplatform/datasets/_datasources.py
index 2ca2c02bfd..5fc51c03f6 100644
--- a/google/cloud/aiplatform/datasets/_datasources.py
+++ b/google/cloud/aiplatform/datasets/_datasources.py
@@ -121,10 +121,9 @@ def __init__(
         Args:
             gcs_source (Union[str, Sequence[str]]):
                 Required. The Google Cloud Storage location for the input content.
-                Google Cloud Storage URI(-s) to the input file(s). May contain
-                wildcards. For more information on wildcards, see
-                https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames.
-                examples:
+                Google Cloud Storage URI(-s) to the input file(s).
+
+                Examples:
                     str: "gs://bucket/file.csv"
                     Sequence[str]: ["gs://bucket/file1.csv", "gs://bucket/file2.csv"]
             import_schema_uri (str):
@@ -185,10 +184,9 @@ def create_datasource(
             `OpenAPI 3.0.2 Schema
         gcs_source (Union[str, Sequence[str]]):
             The Google Cloud Storage location for the input content.
-            Google Cloud Storage URI(-s) to the input file(s). May contain
-            wildcards. For more information on wildcards, see
-            https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames.
-            examples:
+            Google Cloud Storage URI(-s) to the input file(s).
+
+            Examples:
                 str: "gs://bucket/file.csv"
                 Sequence[str]: ["gs://bucket/file1.csv", "gs://bucket/file2.csv"]
         bq_source (str):
diff --git a/google/cloud/aiplatform/datasets/image_dataset.py b/google/cloud/aiplatform/datasets/image_dataset.py
index 4d6db86a7a..b7f7954278 100644
--- a/google/cloud/aiplatform/datasets/image_dataset.py
+++ b/google/cloud/aiplatform/datasets/image_dataset.py
@@ -59,10 +59,9 @@ def create(
                 of any UTF-8 characters.
             gcs_source (Union[str, Sequence[str]]):
                 Google Cloud Storage URI(-s) to the
-                input file(s). May contain wildcards. For more
-                information on wildcards, see
-                https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames.
-                examples:
+                input file(s).
+
+                Examples:
                     str: "gs://bucket/file.csv"
                     Sequence[str]: ["gs://bucket/file1.csv", "gs://bucket/file2.csv"]
             import_schema_uri (str):
diff --git a/google/cloud/aiplatform/datasets/tabular_dataset.py b/google/cloud/aiplatform/datasets/tabular_dataset.py
index 732cebe26f..f4366e4a24 100644
--- a/google/cloud/aiplatform/datasets/tabular_dataset.py
+++ b/google/cloud/aiplatform/datasets/tabular_dataset.py
@@ -63,10 +63,9 @@ def create(
                 of any UTF-8 characters.
             gcs_source (Union[str, Sequence[str]]):
                 Google Cloud Storage URI(-s) to the
-                input file(s). May contain wildcards. For more
-                information on wildcards, see
-                https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames.
-                examples:
+                input file(s).
+
+                Examples:
                     str: "gs://bucket/file.csv"
                     Sequence[str]: ["gs://bucket/file1.csv", "gs://bucket/file2.csv"]
             bq_source (str):
diff --git a/google/cloud/aiplatform/datasets/text_dataset.py b/google/cloud/aiplatform/datasets/text_dataset.py
index 64fab743b3..f74fb76bb7 100644
--- a/google/cloud/aiplatform/datasets/text_dataset.py
+++ b/google/cloud/aiplatform/datasets/text_dataset.py
@@ -66,10 +66,9 @@ def create(
                 of any UTF-8 characters.
             gcs_source (Union[str, Sequence[str]]):
                 Google Cloud Storage URI(-s) to the
-                input file(s). May contain wildcards. For more
-                information on wildcards, see
-                https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames.
-                examples:
+                input file(s).
+
+                Examples:
                     str: "gs://bucket/file.csv"
                     Sequence[str]: ["gs://bucket/file1.csv", "gs://bucket/file2.csv"]
             import_schema_uri (str):
diff --git a/google/cloud/aiplatform/datasets/time_series_dataset.py b/google/cloud/aiplatform/datasets/time_series_dataset.py
index 6bde6be7a5..6cc48e2558 100644
--- a/google/cloud/aiplatform/datasets/time_series_dataset.py
+++ b/google/cloud/aiplatform/datasets/time_series_dataset.py
@@ -57,10 +57,9 @@ def create(
                 of any UTF-8 characters.
             gcs_source (Union[str, Sequence[str]]):
                 Google Cloud Storage URI(-s) to the
-                input file(s). May contain wildcards. For more
-                information on wildcards, see
-                https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames.
-                examples:
+                input file(s).
+
+                Examples:
                     str: "gs://bucket/file.csv"
                     Sequence[str]: ["gs://bucket/file1.csv", "gs://bucket/file2.csv"]
             bq_source (str):
diff --git a/google/cloud/aiplatform/datasets/video_dataset.py b/google/cloud/aiplatform/datasets/video_dataset.py
index c04f136d2d..bef719b17b 100644
--- a/google/cloud/aiplatform/datasets/video_dataset.py
+++ b/google/cloud/aiplatform/datasets/video_dataset.py
@@ -59,10 +59,9 @@ def create(
                 of any UTF-8 characters.
             gcs_source (Union[str, Sequence[str]]):
                 Google Cloud Storage URI(-s) to the
-                input file(s). May contain wildcards. For more
-                information on wildcards, see
-                https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames.
-                examples:
+                input file(s).
+
+                Examples:
                     str: "gs://bucket/file.csv"
                     Sequence[str]: ["gs://bucket/file1.csv", "gs://bucket/file2.csv"]
             import_schema_uri (str):
diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py
index 2783eaaae6..ab24afb171 100644
--- a/google/cloud/aiplatform/jobs.py
+++ b/google/cloud/aiplatform/jobs.py
@@ -409,8 +409,7 @@ def create(
             gcs_source (Optional[Sequence[str]]):
                 Google Cloud Storage URI(-s) to your instances to run batch
                 prediction on. They must match `instances_format`.
-                May contain wildcards. For more information on wildcards, see
-                https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames.
+
             bigquery_source (Optional[str]):
                 BigQuery URI to a table, up to 2000 characters long. For example:
                 `bq://projectId.bqDatasetId.bqTableId`
diff --git a/google/cloud/aiplatform/matching_engine/matching_engine_index.py b/google/cloud/aiplatform/matching_engine/matching_engine_index.py
index 82f37bae9e..d382a126f1 100644
--- a/google/cloud/aiplatform/matching_engine/matching_engine_index.py
+++ b/google/cloud/aiplatform/matching_engine/matching_engine_index.py
@@ -302,7 +302,7 @@ def update_embeddings(
                 The expected structure and format of the files this URI points to is
                 described at
                 https://docs.google.com/document/d/12DLVB6Nq6rdv8grxfBsPhUA283KWrQ9ZenPBp0zUC30
-            is_complete_overwrite (str):
+            is_complete_overwrite (bool):
                 Optional. If this field is set together with contentsDeltaUri when calling IndexService.UpdateIndex,
                 then existing content of the Index will be replaced by the data from the contentsDeltaUri.
             request_metadata (Sequence[Tuple[str, str]]):
diff --git a/google/cloud/aiplatform/models.py b/google/cloud/aiplatform/models.py
index 3b4b03af3e..93243b5678 100644
--- a/google/cloud/aiplatform/models.py
+++ b/google/cloud/aiplatform/models.py
@@ -2491,8 +2491,6 @@ def batch_predict(
             gcs_source: Optional[Sequence[str]] = None
                 Google Cloud Storage URI(-s) to your instances to run batch
                 prediction on. They must match `instances_format`.
-                May contain wildcards. For more information on wildcards, see
-                https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames.
            bigquery_source: Optional[str] = None
                 BigQuery URI to a table, up to 2000 characters long. For example:
                 `bq://projectId.bqDatasetId.bqTableId`
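
For context, a minimal usage sketch of the call sites whose docstrings change in this patch. Project IDs, bucket paths, display names, and resource IDs below are placeholders, not values taken from the diff; only the parameter names come from the documented signatures.

```python
from google.cloud import aiplatform

# Placeholder project/location, not from this patch.
aiplatform.init(project="my-project", location="us-central1")

# gcs_source accepts either a single URI string or a sequence of URIs,
# as the updated docstrings show (wildcards are no longer documented).
ds_single = aiplatform.TabularDataset.create(
    display_name="tabular-from-one-file",
    gcs_source="gs://bucket/file.csv",
)
ds_multi = aiplatform.TabularDataset.create(
    display_name="tabular-from-many-files",
    gcs_source=["gs://bucket/file1.csv", "gs://bucket/file2.csv"],
)

# Batch prediction (jobs.py / models.py): gcs_source URIs must match instances_format.
model = aiplatform.Model(
    "projects/my-project/locations/us-central1/models/123"  # placeholder model
)
batch_job = model.batch_predict(
    job_display_name="example-batch-prediction",
    instances_format="csv",
    gcs_source=["gs://bucket/instances.csv"],
    gcs_destination_prefix="gs://bucket/predictions/",
)

# update_embeddings: is_complete_overwrite is passed as a bool,
# matching the corrected docstring annotation.
index = aiplatform.MatchingEngineIndex(
    "projects/my-project/locations/us-central1/indexes/456"  # placeholder index
)
index = index.update_embeddings(
    contents_delta_uri="gs://bucket/embeddings/",
    is_complete_overwrite=True,
)
```

The `is_complete_overwrite` change is documentation-only: the method already takes a boolean, so only the `(str)` annotation in the docstring needed correcting.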