Skip to content

Commit be0ccc4

Browse files
feat: add PredictRequestResponseLoggingConfig to Endpoint in aiplatform v1 endpoint.proto (#1072)
* feat: add PredictRequestResponseLoggingConfig to Endpoint in aiplatform v1 endpoint.proto PiperOrigin-RevId: 433794371 Source-Link: googleapis/googleapis@e0f0642 Source-Link: googleapis/googleapis-gen@3afc3ec Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiM2FmYzNlYzE4ZTc5NWM4NTkyMzFhMmMwZjhlZjE3MmE2NTA1NmUwZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent f10a1d4 commit be0ccc4

File tree

7 files changed

+71
-27
lines changed

7 files changed

+71
-27
lines changed

google/cloud/aiplatform_v1/__init__.py

+2
Original file line numberDiff line numberDiff line change
@@ -93,6 +93,7 @@
9393
from .types.encryption_spec import EncryptionSpec
9494
from .types.endpoint import DeployedModel
9595
from .types.endpoint import Endpoint
96+
from .types.endpoint import PredictRequestResponseLoggingConfig
9697
from .types.endpoint import PrivateEndpoints
9798
from .types.endpoint_service import CreateEndpointOperationMetadata
9899
from .types.endpoint_service import CreateEndpointRequest
@@ -831,6 +832,7 @@
831832
"Port",
832833
"PredefinedSplit",
833834
"PredictRequest",
835+
"PredictRequestResponseLoggingConfig",
834836
"PredictResponse",
835837
"PredictSchemata",
836838
"PredictionServiceClient",

google/cloud/aiplatform_v1/services/migration_service/client.py

+11-11
Original file line numberDiff line numberDiff line change
@@ -199,32 +199,32 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
199199
return m.groupdict() if m else {}
200200

201201
@staticmethod
202-
def dataset_path(project: str, location: str, dataset: str,) -> str:
202+
def dataset_path(project: str, dataset: str,) -> str:
203203
"""Returns a fully-qualified dataset string."""
204-
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
205-
project=project, location=location, dataset=dataset,
204+
return "projects/{project}/datasets/{dataset}".format(
205+
project=project, dataset=dataset,
206206
)
207207

208208
@staticmethod
209209
def parse_dataset_path(path: str) -> Dict[str, str]:
210210
"""Parses a dataset path into its component segments."""
211-
m = re.match(
212-
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
213-
path,
214-
)
211+
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
215212
return m.groupdict() if m else {}
216213

217214
@staticmethod
218-
def dataset_path(project: str, dataset: str,) -> str:
215+
def dataset_path(project: str, location: str, dataset: str,) -> str:
219216
"""Returns a fully-qualified dataset string."""
220-
return "projects/{project}/datasets/{dataset}".format(
221-
project=project, dataset=dataset,
217+
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
218+
project=project, location=location, dataset=dataset,
222219
)
223220

224221
@staticmethod
225222
def parse_dataset_path(path: str) -> Dict[str, str]:
226223
"""Parses a dataset path into its component segments."""
227-
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
224+
m = re.match(
225+
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
226+
path,
227+
)
228228
return m.groupdict() if m else {}
229229

230230
@staticmethod

google/cloud/aiplatform_v1/types/__init__.py

+2
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,7 @@
6565
from .endpoint import (
6666
DeployedModel,
6767
Endpoint,
68+
PredictRequestResponseLoggingConfig,
6869
PrivateEndpoints,
6970
)
7071
from .endpoint_service import (
@@ -546,6 +547,7 @@
546547
"EncryptionSpec",
547548
"DeployedModel",
548549
"Endpoint",
550+
"PredictRequestResponseLoggingConfig",
549551
"PrivateEndpoints",
550552
"CreateEndpointOperationMetadata",
551553
"CreateEndpointRequest",

google/cloud/aiplatform_v1/types/endpoint.py

+40-1
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,19 @@
1717

1818
from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec
1919
from google.cloud.aiplatform_v1.types import explanation
20+
from google.cloud.aiplatform_v1.types import io
2021
from google.cloud.aiplatform_v1.types import machine_resources
2122
from google.protobuf import timestamp_pb2 # type: ignore
2223

2324

2425
__protobuf__ = proto.module(
2526
package="google.cloud.aiplatform.v1",
26-
manifest={"Endpoint", "DeployedModel", "PrivateEndpoints",},
27+
manifest={
28+
"Endpoint",
29+
"DeployedModel",
30+
"PrivateEndpoints",
31+
"PredictRequestResponseLoggingConfig",
32+
},
2733
)
2834

2935

@@ -113,6 +119,9 @@ class Endpoint(proto.Message):
113119
associated with this Endpoint if monitoring is enabled by
114120
[CreateModelDeploymentMonitoringJob][]. Format:
115121
``projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}``
122+
predict_request_response_logging_config (google.cloud.aiplatform_v1.types.PredictRequestResponseLoggingConfig):
123+
Configures the request-response logging for
124+
online prediction.
116125
"""
117126

118127
name = proto.Field(proto.STRING, number=1,)
@@ -132,6 +141,9 @@ class Endpoint(proto.Message):
132141
network = proto.Field(proto.STRING, number=13,)
133142
enable_private_service_connect = proto.Field(proto.BOOL, number=17,)
134143
model_deployment_monitoring_job = proto.Field(proto.STRING, number=14,)
144+
predict_request_response_logging_config = proto.Field(
145+
proto.MESSAGE, number=18, message="PredictRequestResponseLoggingConfig",
146+
)
135147

136148

137149
class DeployedModel(proto.Message):
@@ -286,4 +298,31 @@ class PrivateEndpoints(proto.Message):
286298
service_attachment = proto.Field(proto.STRING, number=4,)
287299

288300

301+
class PredictRequestResponseLoggingConfig(proto.Message):
302+
r"""Configuration for logging request-response to a BigQuery
303+
table.
304+
305+
Attributes:
306+
enabled (bool):
307+
If logging is enabled or not.
308+
sampling_rate (float):
309+
Percentage of requests to be logged, expressed as a fraction
310+
in the range (0, 1].
311+
bigquery_destination (google.cloud.aiplatform_v1.types.BigQueryDestination):
312+
BigQuery table for logging. If only given project, a new
313+
dataset will be created with name
314+
``logging_<endpoint-display-name>_<endpoint-id>`` where ``<endpoint-display-name>`` will
315+
be made BigQuery-dataset-name compatible (e.g. most special
316+
characters will become underscores). If no table name is
317+
given, a new table will be created with name
318+
``request_response_logging``
319+
"""
320+
321+
enabled = proto.Field(proto.BOOL, number=1,)
322+
sampling_rate = proto.Field(proto.DOUBLE, number=2,)
323+
bigquery_destination = proto.Field(
324+
proto.MESSAGE, number=3, message=io.BigQueryDestination,
325+
)
326+
327+
289328
__all__ = tuple(sorted(__protobuf__.manifest))

google/cloud/aiplatform_v1/types/model_monitoring.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@
3030

3131

3232
class ModelMonitoringObjectiveConfig(proto.Message):
33-
r"""Next ID: 6
33+
r"""Next ID: 7
3434
3535
Attributes:
3636
training_dataset (google.cloud.aiplatform_v1.types.ModelMonitoringObjectiveConfig.TrainingDataset):

tests/unit/gapic/aiplatform_v1/test_endpoint_service.py

+1
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@
4848
from google.cloud.aiplatform_v1.types import endpoint_service
4949
from google.cloud.aiplatform_v1.types import explanation
5050
from google.cloud.aiplatform_v1.types import explanation_metadata
51+
from google.cloud.aiplatform_v1.types import io
5152
from google.cloud.aiplatform_v1.types import machine_resources
5253
from google.cloud.aiplatform_v1.types import operation as gca_operation
5354
from google.longrunning import operations_pb2

tests/unit/gapic/aiplatform_v1/test_migration_service.py

+14-14
Original file line numberDiff line numberDiff line change
@@ -1817,20 +1817,18 @@ def test_parse_dataset_path():
18171817

18181818
def test_dataset_path():
18191819
project = "squid"
1820-
location = "clam"
1821-
dataset = "whelk"
1822-
expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
1823-
project=project, location=location, dataset=dataset,
1820+
dataset = "clam"
1821+
expected = "projects/{project}/datasets/{dataset}".format(
1822+
project=project, dataset=dataset,
18241823
)
1825-
actual = MigrationServiceClient.dataset_path(project, location, dataset)
1824+
actual = MigrationServiceClient.dataset_path(project, dataset)
18261825
assert expected == actual
18271826

18281827

18291828
def test_parse_dataset_path():
18301829
expected = {
1831-
"project": "octopus",
1832-
"location": "oyster",
1833-
"dataset": "nudibranch",
1830+
"project": "whelk",
1831+
"dataset": "octopus",
18341832
}
18351833
path = MigrationServiceClient.dataset_path(**expected)
18361834

@@ -1840,18 +1838,20 @@ def test_parse_dataset_path():
18401838

18411839

18421840
def test_dataset_path():
1843-
project = "cuttlefish"
1844-
dataset = "mussel"
1845-
expected = "projects/{project}/datasets/{dataset}".format(
1846-
project=project, dataset=dataset,
1841+
project = "oyster"
1842+
location = "nudibranch"
1843+
dataset = "cuttlefish"
1844+
expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
1845+
project=project, location=location, dataset=dataset,
18471846
)
1848-
actual = MigrationServiceClient.dataset_path(project, dataset)
1847+
actual = MigrationServiceClient.dataset_path(project, location, dataset)
18491848
assert expected == actual
18501849

18511850

18521851
def test_parse_dataset_path():
18531852
expected = {
1854-
"project": "winkle",
1853+
"project": "mussel",
1854+
"location": "winkle",
18551855
"dataset": "nautilus",
18561856
}
18571857
path = MigrationServiceClient.dataset_path(**expected)

0 commit comments

Comments
 (0)