Skip to content

Commit e1e8b7c

Browse files
committed
Google Cloud - Vertex_AI - Models - Upload_XGBoost_model component - Simplified the component after my Vertex SDK fixes were merged
Some of my Vertex SDK fixes: googleapis/python-aiplatform#779 googleapis/python-aiplatform#882 googleapis/python-aiplatform#943 googleapis/python-aiplatform#997
1 parent e7702ec commit e1e8b7c

File tree

2 files changed

+61
-80
lines changed

2 files changed

+61
-80
lines changed

components/google-cloud/Vertex_AI/Models/Upload_XGBoost_model/component.py

+29-39
Original file line numberDiff line numberDiff line change
@@ -18,58 +18,50 @@ def upload_XGBoost_model_to_Google_Cloud_Vertex_AI(
1818
# explanation_parameters: "google.cloud.aiplatform_v1.types.explanation.ExplanationParameters" = None,
1919

2020
project: str = None,
21-
location: str = "us-central1",
21+
location: str = None,
2222
labels: dict = None,
2323
# encryption_spec_key_name: str = None,
2424
staging_bucket: str = None,
2525
) -> NamedTuple("Outputs", [
2626
("model_name", "GoogleCloudVertexAiModelName"),
2727
("model_dict", dict),
2828
]):
29-
kwargs = locals()
30-
kwargs.pop("model_path")
31-
3229
import json
3330
import os
31+
import shutil
32+
import tempfile
3433
from google.cloud import aiplatform
3534

36-
# Problem: Unlike KFP, when running on Vertex AI, google.auth.default() returns incorrect GCP project ID.
37-
# This leads to failure when trying to create any resource in the project.
38-
# google.api_core.exceptions.PermissionDenied: 403 Permission 'aiplatform.models.upload' denied on resource '//aiplatform.googleapis.com/projects/gbd40bc90c7804989-tp/locations/us-central1' (or it may not exist).
39-
# We can try and get the GCP project ID/number from the environment variables.
40-
if not project:
41-
project_number = os.environ.get("CLOUD_ML_PROJECT_ID")
42-
if project_number:
43-
print(f"Inferred project number: {project_number}")
44-
kwargs["project"] = project_number
45-
# To improve the naming we try to convert the project number into the user project ID.
46-
try:
47-
from googleapiclient import discovery
48-
49-
cloud_resource_manager_service = discovery.build(
50-
"cloudresourcemanager", "v3"
51-
)
52-
project_id = (
53-
cloud_resource_manager_service.projects()
54-
.get(name=f"projects/{project_number}")
55-
.execute()["projectId"]
56-
)
57-
if project_id:
58-
print(f"Inferred project ID: {project_id}")
59-
kwargs["project"] = project_id
60-
except Exception as e:
61-
print(e)
62-
6335
if not location:
64-
kwargs["location"] = os.environ.get("CLOUD_ML_REGION")
36+
location = os.environ.get("CLOUD_ML_REGION")
6537

6638
if not labels:
67-
kwargs["labels"] = {}
68-
kwargs["labels"]["component-source"] = "g.yxqyang.asia-ark-kun-pipeline-components"
39+
labels = {}
40+
labels["component-source"] = "g.yxqyang.asia-ark-kun-pipeline-components"
41+
42+
# The serving container decides the model type based on the model file extension.
43+
# So we need to rename the model file (e.g. /tmp/inputs/model/data) to *.pkl
44+
_, renamed_model_path = tempfile.mkstemp(suffix=".pkl")
45+
shutil.copyfile(src=model_path, dst=renamed_model_path)
6946

7047
model = aiplatform.Model.upload_xgboost_model_file(
71-
model_file_path=model_path,
72-
**kwargs,
48+
model_file_path=renamed_model_path,
49+
xgboost_version=xgboost_version,
50+
51+
display_name=display_name,
52+
description=description,
53+
54+
# instance_schema_uri=instance_schema_uri,
55+
# parameters_schema_uri=parameters_schema_uri,
56+
# prediction_schema_uri=prediction_schema_uri,
57+
# explanation_metadata=explanation_metadata,
58+
# explanation_parameters=explanation_parameters,
59+
60+
project=project,
61+
location=location,
62+
labels=labels,
63+
# encryption_spec_key_name=encryption_spec_key_name,
64+
staging_bucket=staging_bucket,
7365
)
7466
model_json = json.dumps(model.to_dict(), indent=2)
7567
print(model_json)
@@ -88,9 +80,7 @@ def upload_XGBoost_model_to_Google_Cloud_Vertex_AI(
8880
func=upload_XGBoost_model_to_Google_Cloud_Vertex_AI,
8981
base_image="python:3.9",
9082
packages_to_install=[
91-
# "google-cloud-aiplatform==1.6.2",
92-
"git+https://github.com/Ark-kun/python-aiplatform@8f61efb3a7903a6e0ef47d957f26ef3083581c7e#egg=google-cloud-aiplatform&subdirectory=.", # branch: feat--Support-uploading-local-models
93-
"google-api-python-client==2.29.0", # For project number -> project ID conversion
83+
"google-cloud-aiplatform==1.16.0",
9484
],
9585
annotations={
9686
"author": "Alexey Volkov <[email protected]>",

components/google-cloud/Vertex_AI/Models/Upload_XGBoost_model/component.yaml

+32-41
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ inputs:
77
- {name: display_name, type: String, optional: true}
88
- {name: description, type: String, optional: true}
99
- {name: project, type: String, optional: true}
10-
- {name: location, type: String, default: us-central1, optional: true}
10+
- {name: location, type: String, optional: true}
1111
- {name: labels, type: JsonObject, optional: true}
1212
- {name: staging_bucket, type: String, optional: true}
1313
outputs:
@@ -20,10 +20,9 @@ implementation:
2020
- sh
2121
- -c
2222
- (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
23-
'git+https://github.com/Ark-kun/python-aiplatform@8f61efb3a7903a6e0ef47d957f26ef3083581c7e#egg=google-cloud-aiplatform&subdirectory=.'
24-
'google-api-python-client==2.29.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3
25-
-m pip install --quiet --no-warn-script-location 'git+https://github.com/Ark-kun/python-aiplatform@8f61efb3a7903a6e0ef47d957f26ef3083581c7e#egg=google-cloud-aiplatform&subdirectory=.'
26-
'google-api-python-client==2.29.0' --user) && "$0" "$@"
23+
'google-cloud-aiplatform==1.16.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3
24+
-m pip install --quiet --no-warn-script-location 'google-cloud-aiplatform==1.16.0'
25+
--user) && "$0" "$@"
2726
- sh
2827
- -ec
2928
- |
@@ -46,55 +45,47 @@ implementation:
4645
# explanation_parameters: "google.cloud.aiplatform_v1.types.explanation.ExplanationParameters" = None,
4746
4847
project = None,
49-
location = "us-central1",
48+
location = None,
5049
labels = None,
5150
# encryption_spec_key_name: str = None,
5251
staging_bucket = None,
5352
):
54-
kwargs = locals()
55-
kwargs.pop("model_path")
56-
5753
import json
5854
import os
55+
import shutil
56+
import tempfile
5957
from google.cloud import aiplatform
6058
61-
# Problem: Unlike KFP, when running on Vertex AI, google.auth.default() returns incorrect GCP project ID.
62-
# This leads to failure when trying to create any resource in the project.
63-
# google.api_core.exceptions.PermissionDenied: 403 Permission 'aiplatform.models.upload' denied on resource '//aiplatform.googleapis.com/projects/gbd40bc90c7804989-tp/locations/us-central1' (or it may not exist).
64-
# We can try and get the GCP project ID/number from the environment variables.
65-
if not project:
66-
project_number = os.environ.get("CLOUD_ML_PROJECT_ID")
67-
if project_number:
68-
print(f"Inferred project number: {project_number}")
69-
kwargs["project"] = project_number
70-
# To improve the naming we try to convert the project number into the user project ID.
71-
try:
72-
from googleapiclient import discovery
73-
74-
cloud_resource_manager_service = discovery.build(
75-
"cloudresourcemanager", "v3"
76-
)
77-
project_id = (
78-
cloud_resource_manager_service.projects()
79-
.get(name=f"projects/{project_number}")
80-
.execute()["projectId"]
81-
)
82-
if project_id:
83-
print(f"Inferred project ID: {project_id}")
84-
kwargs["project"] = project_id
85-
except Exception as e:
86-
print(e)
87-
8859
if not location:
89-
kwargs["location"] = os.environ.get("CLOUD_ML_REGION")
60+
location = os.environ.get("CLOUD_ML_REGION")
9061
9162
if not labels:
92-
kwargs["labels"] = {}
93-
kwargs["labels"]["component-source"] = "g.yxqyang.asia-ark-kun-pipeline-components"
63+
labels = {}
64+
labels["component-source"] = "g.yxqyang.asia-ark-kun-pipeline-components"
65+
66+
# The serving container decides the model type based on the model file extension.
67+
# So we need to rename the model file (e.g. /tmp/inputs/model/data) to *.pkl
68+
_, renamed_model_path = tempfile.mkstemp(suffix=".pkl")
69+
shutil.copyfile(src=model_path, dst=renamed_model_path)
9470
9571
model = aiplatform.Model.upload_xgboost_model_file(
96-
model_file_path=model_path,
97-
**kwargs,
72+
model_file_path=renamed_model_path,
73+
xgboost_version=xgboost_version,
74+
75+
display_name=display_name,
76+
description=description,
77+
78+
# instance_schema_uri=instance_schema_uri,
79+
# parameters_schema_uri=parameters_schema_uri,
80+
# prediction_schema_uri=prediction_schema_uri,
81+
# explanation_metadata=explanation_metadata,
82+
# explanation_parameters=explanation_parameters,
83+
84+
project=project,
85+
location=location,
86+
labels=labels,
87+
# encryption_spec_key_name=encryption_spec_key_name,
88+
staging_bucket=staging_bucket,
9889
)
9990
model_json = json.dumps(model.to_dict(), indent=2)
10091
print(model_json)

0 commit comments

Comments
 (0)