
Commit 235a6d1

Merge branch 'main' into fix--Fixed-getitng-project-ID-when-running-on-Vertex-AI
2 parents: 692ae9e + 79aeec1

39 files changed: +621 −80 lines

.github/CODEOWNERS

Lines changed: 2 additions & 2 deletions
@@ -4,8 +4,8 @@
 # For syntax help see:
 # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax

-# yoshi-python is the default owner
-* @googleapis/yoshi-python
+# @googleapis/cdpe-cloudai and yoshi-python are the default owners
+* @googleapis/cdpe-cloudai @googleapis/yoshi-python

 # The AI Platform GAPIC libraries are owned by Cloud AI DPE
 /google/cloud/aiplatform_*/** @googleapis/cdpe-cloudai

README.rst

Lines changed: 2 additions & 2 deletions
@@ -111,7 +111,7 @@ Initialize the SDK to store common configurations that you use with the SDK.
     staging_bucket='gs://my_staging_bucket',

     # custom google.auth.credentials.Credentials
-    # environment default creds used if not set
+    # environment default credentials used if not set
     credentials=my_credentials,

     # customer managed encryption key resource name
@@ -188,7 +188,7 @@ Please visit `Using a managed dataset in a custom training application`_ for a d

 .. _Using a managed dataset in a custom training application: https://cloud.google.com/vertex-ai/docs/training/using-managed-datasets

-It must write the model artifact to the environment variable populated by the traing service:
+It must write the model artifact to the environment variable populated by the training service:

 .. code-block:: Python
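For orientation, a minimal init sketch consistent with the corrected README comment; "my-project" and my_credentials are placeholders, not part of this commit:

    from google.cloud import aiplatform

    aiplatform.init(
        project="my-project",
        location="us-central1",
        staging_bucket="gs://my_staging_bucket",
        # environment default credentials used if not set
        credentials=my_credentials,
    )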

google/cloud/aiplatform/base.py

Lines changed: 15 additions & 6 deletions
@@ -98,7 +98,7 @@ def log_create_complete(
         cls (VertexAiResourceNoun):
             Vertex AI Resource Noun class that is being created.
         resource (proto.Message):
-            Vertex AI Resourc proto.Message
+            Vertex AI Resource proto.Message
         variable_name (str): Name of variable to use for code snippet
     """
     self._logger.info(f"{cls.__name__} created. Resource name: {resource.name}")
@@ -121,7 +121,7 @@ def log_create_complete_with_getter(
         cls (VertexAiResourceNoun):
             Vertex AI Resource Noun class that is being created.
         resource (proto.Message):
-            Vertex AI Resourc proto.Message
+            Vertex AI Resource proto.Message
         variable_name (str): Name of variable to use for code snippet
     """
     self._logger.info(f"{cls.__name__} created. Resource name: {resource.name}")
@@ -462,7 +462,7 @@ def __init__(
         Args:
             project(str): Project of the resource noun.
            location(str): The location of the resource noun.
-            credentials(google.auth.crendentials.Crendentials): Optional custom
+            credentials(google.auth.credentials.Credentials): Optional custom
                credentials to use when accessing interacting with resource noun.
            resource_name(str): A fully-qualified resource name or ID.
        """
@@ -655,6 +655,15 @@ def gca_resource(self) -> proto.Message:
         self._assert_gca_resource_is_available()
         return self._gca_resource

+    @property
+    def _resource_is_available(self) -> bool:
+        """Returns True if GCA resource has been created and is available, otherwise False"""
+        try:
+            self._assert_gca_resource_is_available()
+            return True
+        except RuntimeError:
+            return False
+
     def _assert_gca_resource_is_available(self) -> None:
         """Helper method to raise when property is not accessible.
@@ -833,7 +842,7 @@ def __init__(
         Args:
             project (str): Optional. Project of the resource noun.
             location (str): Optional. The location of the resource noun.
-            credentials(google.auth.crendentials.Crendentials):
+            credentials(google.auth.credentials.Credentials):
                 Optional. custom credentials to use when accessing interacting with
                 resource noun.
             resource_name(str): A fully-qualified resource name or ID.
@@ -863,7 +872,7 @@ def _empty_constructor(
         Args:
             project (str): Optional. Project of the resource noun.
             location (str): Optional. The location of the resource noun.
-            credentials(google.auth.crendentials.Crendentials):
+            credentials(google.auth.credentials.Credentials):
                 Optional. custom credentials to use when accessing interacting with
                 resource noun.
             resource_name(str): A fully-qualified resource name or ID.
@@ -1153,7 +1162,7 @@ def delete(self, sync: bool = True) -> None:
         _LOGGER.log_action_completed_against_resource("deleted.", "", self)

     def __repr__(self) -> str:
-        if self._gca_resource:
+        if self._gca_resource and self._resource_is_available:
             return VertexAiResourceNoun.__repr__(self)

         return FutureManager.__repr__(self)
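A hedged sketch of what the new _resource_is_available guard changes in practice; the dataset name and GCS path are placeholders, and any resource class created with sync=False would do:

    from google.cloud import aiplatform

    # Creation runs in the background, so the backing GCA resource may not exist yet.
    ds = aiplatform.TabularDataset.create(
        display_name="my-dataset",
        gcs_source="gs://my-bucket/data.csv",
        sync=False,
    )

    # __repr__ now consults _resource_is_available and falls back to
    # FutureManager.__repr__ instead of raising while creation is in flight.
    print(repr(ds))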

google/cloud/aiplatform/datasets/dataset.py

Lines changed: 2 additions & 2 deletions
@@ -164,7 +164,7 @@ def create(
             be picked randomly. Two DataItems are considered identical
             if their content bytes are identical (e.g. image bytes or
             pdf bytes). These labels will be overridden by Annotation
-            labels specified inside index file refenced by
+            labels specified inside index file referenced by
             ``import_schema_uri``,
             e.g. jsonl file.
         project (str):
@@ -488,7 +488,7 @@ def import_data(
             be picked randomly. Two DataItems are considered identical
             if their content bytes are identical (e.g. image bytes or
             pdf bytes). These labels will be overridden by Annotation
-            labels specified inside index file refenced by
+            labels specified inside index file referenced by
             ``import_schema_uri``,
             e.g. jsonl file.
         sync (bool):

google/cloud/aiplatform/datasets/image_dataset.py

Lines changed: 1 addition & 1 deletion
@@ -82,7 +82,7 @@ def create(
             be picked randomly. Two DataItems are considered identical
             if their content bytes are identical (e.g. image bytes or
             pdf bytes). These labels will be overridden by Annotation
-            labels specified inside index file refenced by
+            labels specified inside index file referenced by
             ``import_schema_uri``,
             e.g. jsonl file.
         project (str):

google/cloud/aiplatform/datasets/text_dataset.py

Lines changed: 1 addition & 1 deletion
@@ -89,7 +89,7 @@ def create(
             be picked randomly. Two DataItems are considered identical
             if their content bytes are identical (e.g. image bytes or
             pdf bytes). These labels will be overridden by Annotation
-            labels specified inside index file refenced by
+            labels specified inside index file referenced by
             ``import_schema_uri``,
             e.g. jsonl file.
         project (str):

google/cloud/aiplatform/datasets/video_dataset.py

Lines changed: 1 addition & 1 deletion
@@ -82,7 +82,7 @@ def create(
             be picked randomly. Two DataItems are considered identical
             if their content bytes are identical (e.g. image bytes or
             pdf bytes). These labels will be overridden by Annotation
-            labels specified inside index file refenced by
+            labels specified inside index file referenced by
             ``import_schema_uri``,
             e.g. jsonl file.
         project (str):

google/cloud/aiplatform/explain/lit.py

Lines changed: 3 additions & 3 deletions
@@ -29,23 +29,23 @@
 except ImportError:
     raise ImportError(
         "LIT is not installed and is required to get Dataset as the return format. "
-        'Please install the SDK using "pip install python-aiplatform[lit]"'
+        'Please install the SDK using "pip install google-cloud-aiplatform[lit]"'
     )

 try:
     import tensorflow as tf
 except ImportError:
     raise ImportError(
         "Tensorflow is not installed and is required to load saved model. "
-        'Please install the SDK using "pip install pip install python-aiplatform[lit]"'
+        'Please install the SDK using "pip install google-cloud-aiplatform[lit]"'
     )

 try:
     import pandas as pd
 except ImportError:
     raise ImportError(
         "Pandas is not installed and is required to read the dataset. "
-        'Please install Pandas using "pip install python-aiplatform[lit]"'
+        'Please install Pandas using "pip install google-cloud-aiplatform[lit]"'
     )

google/cloud/aiplatform/jobs.py

Lines changed: 5 additions & 5 deletions
@@ -325,7 +325,7 @@ def output_info(self,) -> Optional[aiplatform.gapic.BatchPredictionJob.OutputInf
     """Information describing the output of this job, including output location
     into which prediction output is written.

-    This is only available for batch predicition jobs that have run successfully.
+    This is only available for batch prediction jobs that have run successfully.
     """
     self._assert_gca_resource_is_available()
     return self._gca_resource.output_info
@@ -839,7 +839,7 @@ def __init__(
     Args:
         project(str): Project of the resource noun.
         location(str): The location of the resource noun.
-        credentials(google.auth.crendentials.Crendentials): Optional custom
+        credentials(google.auth.credentials.Credentials): Optional custom
            credentials to use when accessing interacting with resource noun.
    """

@@ -1023,7 +1023,7 @@ def __init__(
     encryption_spec_key_name: Optional[str] = None,
     staging_bucket: Optional[str] = None,
 ):
-    """Cosntruct a Custom Job with Worker Pool Specs.
+    """Constructs a Custom Job with Worker Pool Specs.

    ```
    Example usage:
@@ -1569,7 +1569,7 @@ def __init__(
         Required. Configured CustomJob. The worker pool spec from this custom job
         applies to the CustomJobs created in all the trials.
     metric_spec: Dict[str, str]
-        Required. Dicionary representing metrics to optimize. The dictionary key is the metric_id,
+        Required. Dictionary representing metrics to optimize. The dictionary key is the metric_id,
         which is reported by your training job, and the dictionary value is the
         optimization goal of the metric('minimize' or 'maximize'). example:
@@ -1594,7 +1594,7 @@ def __init__(
         DoubleParameterSpec, IntegerParameterSpec, CategoricalParameterSpace, DiscreteParameterSpec

     max_trial_count (int):
-        Reuired. The desired total number of Trials.
+        Required. The desired total number of Trials.
     parallel_trial_count (int):
         Required. The desired number of Trials to run in parallel.
     max_failed_trial_count (int):
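A sketch of the metric_spec/parameter_spec shapes these docstrings describe; the display name, bounds, and my_custom_job are illustrative placeholders:

    from google.cloud import aiplatform
    from google.cloud.aiplatform import hyperparameter_tuning as hpt

    # metric_id reported by the training job -> 'minimize' or 'maximize'
    metric_spec = {"loss": "minimize"}

    # One of the *ParameterSpec classes listed above per tunable parameter
    parameter_spec = {
        "learning_rate": hpt.DoubleParameterSpec(min=1e-5, max=1e-1, scale="log"),
        "batch_size": hpt.DiscreteParameterSpec(values=[16, 32, 64], scale="linear"),
    }

    job = aiplatform.HyperparameterTuningJob(
        display_name="hp-tune",
        custom_job=my_custom_job,  # a configured aiplatform.CustomJob
        metric_spec=metric_spec,
        parameter_spec=parameter_spec,
        max_trial_count=8,
        parallel_trial_count=2,
    )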

google/cloud/aiplatform/matching_engine/matching_engine_index_endpoint.py

Lines changed: 7 additions & 0 deletions
@@ -535,6 +535,8 @@ def deploy_index(
         MatchingEngineIndexEndpoint - IndexEndpoint resource object
     """

+    self.wait()
+
     _LOGGER.log_action_start_against_resource(
         "Deploying index", "index_endpoint", self,
     )
@@ -591,6 +593,8 @@ def undeploy_index(
         MatchingEngineIndexEndpoint - IndexEndpoint resource object
     """

+    self.wait()
+
     _LOGGER.log_action_start_against_resource(
         "Undeploying index", "index_endpoint", self,
     )
@@ -652,6 +656,8 @@ def mutate_deployed_index(
         Optional. Strings which should be sent along with the request as metadata.
     """

+    self.wait()
+
     _LOGGER.log_action_start_against_resource(
         "Mutating index", "index_endpoint", self,
     )
@@ -691,6 +697,7 @@ def deployed_indexes(
     Returns:
         List[gca_matching_engine_index_endpoint.DeployedIndex] - Deployed indexes
     """
+    self._assert_gca_resource_is_available()
     return self._gca_resource.deployed_indexes

 @base.optional_sync()
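The added self.wait() calls make these methods safe on endpoints created asynchronously. A hedged sketch; the display name, VPC network path, and my_index are placeholders:

    from google.cloud import aiplatform

    endpoint = aiplatform.MatchingEngineIndexEndpoint.create(
        display_name="my-endpoint",
        network="projects/123/global/networks/my-vpc",
        sync=False,  # creation continues in the background
    )

    # deploy_index now blocks on wait() until the endpoint resource exists,
    # rather than failing because _gca_resource is not yet populated.
    endpoint.deploy_index(index=my_index, deployed_index_id="my_deployed_index")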

google/cloud/aiplatform/metadata/metadata.py

Lines changed: 2 additions & 2 deletions
@@ -155,7 +155,7 @@ def log_metrics(self, metrics: Dict[str, Union[float, int]]):

     Args:
         metrics (Dict):
-            Required. Metrics key/value pairs. Only flot and int are supported format for value.
+            Required. Metrics key/value pairs. Only float and int are supported format for value.
     Raises:
         TypeError: If value contains unsupported types.
         ValueError: If Experiment or Run is not set.
@@ -263,7 +263,7 @@ def _validate_metrics_value_type(metrics: Dict[str, Union[float, int]]):

     Args:
         metrics (Dict):
-            Required. Metrics key/value pairs. Only flot and int are supported format for value.
+            Required. Metrics key/value pairs. Only float and int are supported format for value.
     Raises:
         TypeError: If value contains unsupported types.
     """

google/cloud/aiplatform/models.py

Lines changed: 4 additions & 0 deletions
@@ -1575,6 +1575,8 @@ def update(
         ValueError: If `labels` is not the correct format.
     """

+    self.wait()
+
     current_model_proto = self.gca_resource
     copied_model_proto = current_model_proto.__class__(current_model_proto)

@@ -2496,6 +2498,8 @@ def export_model(
         ValueError: If invalid arguments or export formats are provided.
     """

+    self.wait()
+
     # Model does not support exporting
     if not self.supported_export_formats:
         raise ValueError(f"The model `{self.resource_name}` is not exportable.")

google/cloud/aiplatform/tensorboard/uploader_utils.py

Lines changed: 1 addition & 1 deletion
@@ -377,7 +377,7 @@ def get_or_create(

     Returns:
         time_series (tensorboard_time_series.TensorboardTimeSeries):
-            A new or existing tensorboard_time_series.TensorbaordTimeSeries.
+            A new or existing tensorboard_time_series.TensorboardTimeSeries.

     Raises:
         exceptions.InvalidArgument:

google/cloud/aiplatform/training_jobs.py

Lines changed: 2 additions & 2 deletions
@@ -435,7 +435,7 @@ def _create_input_data_config(
             - AIP_TEST_DATA_URI = "bigquery_destination.dataset_*.test"
     Raises:
         ValueError: When more than 1 type of split configuration is passed or when
-            the split configuartion passed is incompatible with the dataset schema.
+            the split configuration passed is incompatible with the dataset schema.
     """

     input_data_config = None
@@ -5811,7 +5811,7 @@ def __init__(
             multiple objects in shots and segments. You can use these
             models to track objects in your videos according to your
             own pre-defined, custom labels.
-        "action_recognition" - A video action reconition model pinpoints
+        "action_recognition" - A video action recognition model pinpoints
             the location of actions with short temporal durations (~1 second).
     model_type: str = "CLOUD"
         Required. One of the following:

google/cloud/aiplatform/utils/pipeline_utils.py

Lines changed: 1 addition & 1 deletion
@@ -155,7 +155,7 @@ def _get_vertex_value(
         inputs, or value is none.
     """
     if value is None:
-        raise ValueError("None values should be filterd out.")
+        raise ValueError("None values should be filtered out.")

     if name not in self._parameter_types:
         raise ValueError(

google/cloud/aiplatform/utils/source_utils.py

Lines changed: 1 addition & 1 deletion
@@ -71,7 +71,7 @@ class _TrainingScriptPythonPackager:
     packager = TrainingScriptPythonPackager('my_script.py', ['pandas', 'pytorch'])
     gcs_path = packager.package_and_copy_to_gcs(
         gcs_staging_dir='my-bucket',
-        project='my-prject')
+        project='my-project')
     module_name = packager.module_name

 The package after installed can be executed as:

google/cloud/aiplatform/utils/worker_spec_utils.py

Lines changed: 1 addition & 1 deletion
@@ -186,7 +186,7 @@ def chief_worker_pool(
     reduction_server_replica_count: int = 0,
     reduction_server_machine_type: str = None,
 ) -> "_DistributedTrainingSpec":
-    """Parameterizes Config to support only chief with worker replicas.
+    """Parametrizes Config to support only chief with worker replicas.

     For replica is assigned to chief and the remainder to workers. All spec have the
     same machine type, accelerator count, and accelerator type.

google/cloud/aiplatform_v1/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -131,6 +131,7 @@
 from .types.feature_selector import FeatureSelector
 from .types.feature_selector import IdMatcher
 from .types.featurestore import Featurestore
+from .types.featurestore_monitoring import FeaturestoreMonitoringConfig
 from .types.featurestore_online_service import FeatureValue
 from .types.featurestore_online_service import FeatureValueList
 from .types.featurestore_online_service import ReadFeatureValuesRequest
@@ -672,6 +673,7 @@
     "FeatureValueDestination",
     "FeatureValueList",
     "Featurestore",
+    "FeaturestoreMonitoringConfig",
     "FeaturestoreOnlineServingServiceClient",
     "FeaturestoreServiceClient",
     "FilterSplit",

google/cloud/aiplatform_v1/services/featurestore_service/async_client.py

Lines changed: 8 additions & 3 deletions
@@ -41,6 +41,7 @@
 from google.cloud.aiplatform_v1.types import feature as gca_feature
 from google.cloud.aiplatform_v1.types import featurestore
 from google.cloud.aiplatform_v1.types import featurestore as gca_featurestore
+from google.cloud.aiplatform_v1.types import featurestore_monitoring
 from google.cloud.aiplatform_v1.types import featurestore_service
 from google.cloud.aiplatform_v1.types import operation as gca_operation
 from google.protobuf import empty_pb2  # type: ignore
@@ -1202,7 +1203,12 @@ def sample_update_entity_type():
     - ``description``
     - ``labels``
     - ``monitoring_config.snapshot_analysis.disabled``
-    - ``monitoring_config.snapshot_analysis.monitoring_interval``
+    - ``monitoring_config.snapshot_analysis.monitoring_interval_days``
+    - ``monitoring_config.snapshot_analysis.staleness_days``
+    - ``monitoring_config.import_features_analysis.state``
+    - ``monitoring_config.import_features_analysis.anomaly_detection_baseline``
+    - ``monitoring_config.numerical_threshold_config.value``
+    - ``monitoring_config.categorical_threshold_config.value``

     This corresponds to the ``update_mask`` field
     on the ``request`` instance; if ``request`` is provided, this
@@ -1912,8 +1918,7 @@ def sample_update_feature():

     - ``description``
     - ``labels``
-    - ``monitoring_config.snapshot_analysis.disabled``
-    - ``monitoring_config.snapshot_analysis.monitoring_interval``
+    - ``disable_monitoring``

     This corresponds to the ``update_mask`` field
     on the ``request`` instance; if ``request`` is provided, this
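A hedged sketch of an update_mask built from one of the newly documented paths; which paths apply depends on your monitoring configuration:

    from google.protobuf import field_mask_pb2

    update_mask = field_mask_pb2.FieldMask(
        paths=["monitoring_config.snapshot_analysis.monitoring_interval_days"]
    )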
