Commit 1c198f1
feat: add model_version_id to UploadModelResponse in aiplatform v1 model_service.proto (#1442)

* feat: add model_version_id to UploadModelResponse in aiplatform v1 model_service.proto

  PiperOrigin-RevId: 455486702
  Source-Link: googleapis/googleapis@f2e5538
  Source-Link: googleapis/googleapis-gen@1520022
  Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTUyMDAyMjNiMTU3Y2I5ZTc2Yzc5MjNiYzM4N2RkNmU5YzBhODNiZCJ9

* 🦉 Updates from OwlBot post-processor

  See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>

1 parent e138cfd · commit 1c198f1

File tree

11 files changed: +97 −26 lines

.kokoro/samples/python3.6/common.cfg

+40 (new file)

@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Specify which tests to run
+env_vars: {
+  key: "RUN_TESTS_SESSION"
+  value: "py-3.6"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+  key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+  value: "ucaip-sample-tests"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-aiplatform/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-aiplatform/.kokoro/trampoline_v2.sh"
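
These env_vars are consumed by the repository's Kokoro test scripts. As a rough illustration (the runner shown here is an assumption, not taken from this commit), RUN_TESTS_SESSION typically names the nox session to execute:

# Hypothetical consumer of this config: run the nox session named by
# RUN_TESTS_SESSION (e.g. "py-3.6").
import os
import subprocess

session = os.environ.get("RUN_TESTS_SESSION")
if session:
    subprocess.run(["nox", "-s", session], check=True)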
(new file, +7 — file path not shown)

@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
+
(new file, +11 — file path not shown)

@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-aiplatform/.kokoro/test-samples-against-head.sh"
+}
(new file, +6 — file path not shown)

@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}

google/cloud/aiplatform_v1/services/migration_service/client.py

+9 −9

@@ -218,40 +218,40 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
     @staticmethod
     def dataset_path(
         project: str,
+        location: str,
         dataset: str,
     ) -> str:
         """Returns a fully-qualified dataset string."""
-        return "projects/{project}/datasets/{dataset}".format(
+        return "projects/{project}/locations/{location}/datasets/{dataset}".format(
             project=project,
+            location=location,
             dataset=dataset,
         )
 
     @staticmethod
     def parse_dataset_path(path: str) -> Dict[str, str]:
         """Parses a dataset path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
+            path,
+        )
         return m.groupdict() if m else {}
 
     @staticmethod
     def dataset_path(
         project: str,
-        location: str,
         dataset: str,
     ) -> str:
         """Returns a fully-qualified dataset string."""
-        return "projects/{project}/locations/{location}/datasets/{dataset}".format(
+        return "projects/{project}/datasets/{dataset}".format(
             project=project,
-            location=location,
             dataset=dataset,
         )
 
     @staticmethod
     def parse_dataset_path(path: str) -> Dict[str, str]:
         """Parses a dataset path into its component segments."""
-        m = re.match(
-            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
-            path,
-        )
+        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
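
For orientation, a self-contained sketch of the format/parse round trip the location-aware helper pair implements (values are made up; note that when a class defines dataset_path more than once, as above, the later definition shadows the earlier one):

import re
from typing import Dict

def dataset_path(project: str, location: str, dataset: str) -> str:
    # Mirrors the location-aware variant from the hunk above.
    return "projects/{project}/locations/{location}/datasets/{dataset}".format(
        project=project, location=location, dataset=dataset
    )

def parse_dataset_path(path: str) -> Dict[str, str]:
    # Mirrors the matching parser.
    m = re.match(
        r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
        path,
    )
    return m.groupdict() if m else {}

p = dataset_path("my-project", "us-central1", "my-dataset")
assert parse_dataset_path(p) == {
    "project": "my-project",
    "location": "us-central1",
    "dataset": "my-dataset",
}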

google/cloud/aiplatform_v1/types/model_service.py

+7

@@ -121,12 +121,19 @@ class UploadModelResponse(proto.Message):
         model (str):
             The name of the uploaded Model resource. Format:
             ``projects/{project}/locations/{location}/models/{model}``
+        model_version_id (str):
+            Output only. The version ID of the model that
+            is uploaded.
     """
 
     model = proto.Field(
         proto.STRING,
         number=1,
     )
+    model_version_id = proto.Field(
+        proto.STRING,
+        number=2,
+    )
 
 
 class GetModelRequest(proto.Message):
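
A minimal usage sketch of the new field (values are hypothetical; in practice UploadModelResponse is returned by the UploadModel long-running operation rather than constructed by hand):

from google.cloud.aiplatform_v1.types import model_service

# Constructed directly only to illustrate the new field.
response = model_service.UploadModelResponse(
    model="projects/my-project/locations/us-central1/models/123",
    model_version_id="1",
)
print(response.model, response.model_version_id)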

mypy.ini

+1 −1

@@ -1,3 +1,3 @@
 [mypy]
-python_version = 3.7
+python_version = 3.6
 namespace_packages = True

samples/model-builder/noxfile.py

+1 −1

@@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
 
 # DO NOT EDIT - automatically generated.
 # All versions used to test samples.
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"]
+ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
 
 # Any default versions that should be ignored.
 IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
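
Elsewhere in these generated noxfiles the two lists are combined roughly as follows; a simplified sketch, not the file's exact code (the IGNORED_VERSIONS value here is a stand-in):

# Simplified sketch of how the tested versions are derived.
ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
IGNORED_VERSIONS = ["3.9", "3.10"]  # stand-in for TEST_CONFIG["ignored_versions"]

TESTED_VERSIONS = sorted(v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS)
print(TESTED_VERSIONS)  # ['3.6', '3.7', '3.8']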

samples/snippets/noxfile.py

+1 −1

@@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
 
 # DO NOT EDIT - automatically generated.
 # All versions used to test samples.
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"]
+ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
 
 # Any default versions that should be ignored.
 IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]

scripts/readme-gen/templates/install_deps.tmpl.rst

+1 −1

@@ -12,7 +12,7 @@ Install Dependencies
 .. _Python Development Environment Setup Guide:
     https://cloud.google.com/python/setup
 
-#. Create a virtualenv. Samples are compatible with Python 3.7+.
+#. Create a virtualenv. Samples are compatible with Python 3.6+.
 
 .. code-block:: bash
 

tests/unit/gapic/aiplatform_v1/test_migration_service.py

+13 −13

@@ -1979,19 +1979,22 @@ def test_parse_dataset_path():
 
 def test_dataset_path():
     project = "squid"
-    dataset = "clam"
-    expected = "projects/{project}/datasets/{dataset}".format(
+    location = "clam"
+    dataset = "whelk"
+    expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
         project=project,
+        location=location,
         dataset=dataset,
     )
-    actual = MigrationServiceClient.dataset_path(project, dataset)
+    actual = MigrationServiceClient.dataset_path(project, location, dataset)
     assert expected == actual
 
 
 def test_parse_dataset_path():
     expected = {
-        "project": "whelk",
-        "dataset": "octopus",
+        "project": "octopus",
+        "location": "oyster",
+        "dataset": "nudibranch",
     }
     path = MigrationServiceClient.dataset_path(**expected)
 
@@ -2001,22 +2004,19 @@ def test_parse_dataset_path():
 
 
 def test_dataset_path():
-    project = "oyster"
-    location = "nudibranch"
-    dataset = "cuttlefish"
-    expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
+    project = "cuttlefish"
+    dataset = "mussel"
+    expected = "projects/{project}/datasets/{dataset}".format(
         project=project,
-        location=location,
         dataset=dataset,
     )
-    actual = MigrationServiceClient.dataset_path(project, location, dataset)
+    actual = MigrationServiceClient.dataset_path(project, dataset)
     assert expected == actual
 
 
 def test_parse_dataset_path():
     expected = {
-        "project": "mussel",
-        "location": "winkle",
+        "project": "winkle",
         "dataset": "nautilus",
     }
     path = MigrationServiceClient.dataset_path(**expected)
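
To exercise just these path-helper tests locally, one illustrative option is invoking pytest programmatically:

# Illustrative: select only the dataset-path tests in this module.
import pytest

pytest.main(
    [
        "tests/unit/gapic/aiplatform_v1/test_migration_service.py",
        "-k",
        "dataset_path",
    ]
)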
