
Commit 8ddc062

vertex-sdk-bot authored and copybara-github committed
docs: Add temporal fusion transformer (TFT) model code sample.
PiperOrigin-RevId: 511322674
1 parent f8052b8 commit 8ddc062

5 files changed: +194 -4 lines changed

samples/model-builder/conftest.py

+15
@@ -264,6 +264,21 @@ def mock_run_automl_forecasting_seq2seq_training_job(mock_forecasting_training_j
         yield mock


+@pytest.fixture
+def mock_get_automl_forecasting_tft_training_job(mock_forecasting_training_job):
+    with patch.object(
+        aiplatform, "TemporalFusionTransformerForecastingTrainingJob"
+    ) as mock:
+        mock.return_value = mock_forecasting_training_job
+        yield mock
+
+
+@pytest.fixture
+def mock_run_automl_forecasting_tft_training_job(mock_forecasting_training_job):
+    with patch.object(mock_forecasting_training_job, "run") as mock:
+        yield mock
+
+
 @pytest.fixture
 def mock_get_automl_image_training_job(mock_image_training_job):
     with patch.object(aiplatform, "AutoMLImageTrainingJob") as mock:
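These fixtures follow the pattern of the surrounding forecasting fixtures: the first patches aiplatform.TemporalFusionTransformerForecastingTrainingJob so that constructing the job inside the sample returns the shared mock_forecasting_training_job, and the second patches that mock's run method so a test can assert on the call arguments without launching a real pipeline. A minimal sketch of the patching behaviour the fixtures rely on (illustrative only, not part of this commit):

from unittest.mock import patch

from google.cloud import aiplatform

# Patching the class on the aiplatform module means any code that instantiates
# TemporalFusionTransformerForecastingTrainingJob receives the mock instead.
with patch.object(
    aiplatform, "TemporalFusionTransformerForecastingTrainingJob"
) as mock_job_cls:
    job = aiplatform.TemporalFusionTransformerForecastingTrainingJob(
        display_name="my_job", optimization_objective="minimize-rmse"
    )
    assert job is mock_job_cls.return_value  # no real training job is created
    job.run(sync=True)  # recorded by the mock rather than executed
    job.run.assert_called_once_with(sync=True)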

samples/model-builder/create_training_pipeline_forecasting_seq2seq_sample.py

-2
@@ -43,7 +43,6 @@ def create_training_pipeline_forecasting_seq2seq_sample(
     export_evaluated_data_items: bool = False,
     export_evaluated_data_items_bigquery_destination_uri: Optional[str] = None,
     export_evaluated_data_items_override_destination: bool = False,
-    quantiles: Optional[List[float]] = None,
     validation_options: Optional[str] = None,
     predefined_split_column_name: Optional[str] = None,
     sync: bool = True,
@@ -80,7 +79,6 @@ def create_training_pipeline_forecasting_seq2seq_sample(
         export_evaluated_data_items=export_evaluated_data_items,
         export_evaluated_data_items_bigquery_destination_uri=export_evaluated_data_items_bigquery_destination_uri,
         export_evaluated_data_items_override_destination=export_evaluated_data_items_override_destination,
-        quantiles=quantiles,
         validation_options=validation_options,
         budget_milli_node_hours=budget_milli_node_hours,
         model_display_name=model_display_name,

samples/model-builder/create_training_pipeline_forecasting_seq2seq_sample_test.py

-2
@@ -42,7 +42,6 @@ def test_create_training_pipeline_forecasting_seq2seq_sample(
         export_evaluated_data_items=constants.EXPORT_EVALUATED_DATA_ITEMS,
         export_evaluated_data_items_bigquery_destination_uri=constants.EXPORT_EVALUATED_DATA_ITEMS_BIGQUERY_DESTINATION_URI,
         export_evaluated_data_items_override_destination=constants.EXPORT_EVALUATED_DATA_ITEMS_OVERRIDE_DESTINATION,
-        quantiles=constants.QUANTILES,
         validation_options=constants.VALIDATION_OPTIONS,
         predefined_split_column_name=constants.PREDEFINED_SPLIT_COLUMN_NAME,
     )
@@ -78,7 +77,6 @@ def test_create_training_pipeline_forecasting_seq2seq_sample(
         export_evaluated_data_items=constants.EXPORT_EVALUATED_DATA_ITEMS,
         export_evaluated_data_items_bigquery_destination_uri=constants.EXPORT_EVALUATED_DATA_ITEMS_BIGQUERY_DESTINATION_URI,
         export_evaluated_data_items_override_destination=constants.EXPORT_EVALUATED_DATA_ITEMS_OVERRIDE_DESTINATION,
-        quantiles=constants.QUANTILES,
         validation_options=constants.VALIDATION_OPTIONS,
         predefined_split_column_name=constants.PREDEFINED_SPLIT_COLUMN_NAME,
         sync=True,
samples/model-builder/create_training_pipeline_forecasting_tft_sample.py

+97
@@ -0,0 +1,97 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import List, Optional
+
+from google.cloud import aiplatform
+
+
+# [START aiplatform_sdk_create_training_pipeline_forecasting_tft_sample]
+def create_training_pipeline_forecasting_temporal_fusion_transformer_sample(
+    project: str,
+    display_name: str,
+    dataset_id: str,
+    location: str = "us-central1",
+    model_display_name: str = "my_model",
+    target_column: str = "target_column",
+    time_column: str = "date",
+    time_series_identifier_column: str = "time_series_id",
+    unavailable_at_forecast_columns: List[str] = [],
+    available_at_forecast_columns: List[str] = [],
+    forecast_horizon: int = 1,
+    data_granularity_unit: str = "week",
+    data_granularity_count: int = 1,
+    training_fraction_split: float = 0.8,
+    validation_fraction_split: float = 0.1,
+    test_fraction_split: float = 0.1,
+    budget_milli_node_hours: int = 8000,
+    timestamp_split_column_name: str = "timestamp_split",
+    weight_column: str = "weight",
+    time_series_attribute_columns: List[str] = [],
+    context_window: int = 0,
+    export_evaluated_data_items: bool = False,
+    export_evaluated_data_items_bigquery_destination_uri: Optional[str] = None,
+    export_evaluated_data_items_override_destination: bool = False,
+    validation_options: Optional[str] = None,
+    predefined_split_column_name: Optional[str] = None,
+    sync: bool = True,
+):
+    aiplatform.init(project=project, location=location)
+
+    # Create training job
+    forecasting_tft_job = aiplatform.TemporalFusionTransformerForecastingTrainingJob(
+        display_name=display_name,
+        optimization_objective="minimize-rmse",
+    )
+
+    # Retrieve existing dataset
+    dataset = aiplatform.TimeSeriesDataset(dataset_id)
+
+    # Run training job
+    model = forecasting_tft_job.run(
+        dataset=dataset,
+        target_column=target_column,
+        time_column=time_column,
+        time_series_identifier_column=time_series_identifier_column,
+        unavailable_at_forecast_columns=unavailable_at_forecast_columns,
+        available_at_forecast_columns=available_at_forecast_columns,
+        forecast_horizon=forecast_horizon,
+        data_granularity_unit=data_granularity_unit,
+        data_granularity_count=data_granularity_count,
+        training_fraction_split=training_fraction_split,
+        validation_fraction_split=validation_fraction_split,
+        test_fraction_split=test_fraction_split,
+        predefined_split_column_name=predefined_split_column_name,
+        timestamp_split_column_name=timestamp_split_column_name,
+        weight_column=weight_column,
+        time_series_attribute_columns=time_series_attribute_columns,
+        context_window=context_window,
+        export_evaluated_data_items=export_evaluated_data_items,
+        export_evaluated_data_items_bigquery_destination_uri=export_evaluated_data_items_bigquery_destination_uri,
+        export_evaluated_data_items_override_destination=export_evaluated_data_items_override_destination,
+        validation_options=validation_options,
+        budget_milli_node_hours=budget_milli_node_hours,
+        model_display_name=model_display_name,
+        sync=sync,
+    )
+
+    model.wait()
+
+    print(model.display_name)
+    print(model.resource_name)
+    print(model.uri)
+    return model
+
+
+# [END aiplatform_sdk_create_training_pipeline_forecasting_tft_sample]
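For context (not part of the commit), a hypothetical invocation of the sample above might look like the following. The project ID, dataset ID, and column names are placeholders for your own resources, and the defaults shown in the sample (for example weight_column="weight" and timestamp_split_column_name="timestamp_split") assume matching columns exist in the dataset:

import create_training_pipeline_forecasting_tft_sample as tft_sample

# All identifiers below are placeholders, not real resources.
model = tft_sample.create_training_pipeline_forecasting_temporal_fusion_transformer_sample(
    project="my-gcp-project",
    display_name="tft-forecasting-job",
    dataset_id="1234567890123456789",  # ID of an existing Vertex AI TimeSeriesDataset
    target_column="sales",
    time_column="date",
    time_series_identifier_column="store_id",
    unavailable_at_forecast_columns=["sales"],
    available_at_forecast_columns=["date"],
    forecast_horizon=4,
    data_granularity_unit="week",
    data_granularity_count=1,
)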
samples/model-builder/create_training_pipeline_forecasting_tft_sample_test.py

+82
@@ -0,0 +1,82 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import create_training_pipeline_forecasting_tft_sample
+import test_constants as constants
+
+
+def test_create_training_pipeline_forecasting_tft_sample(
+    mock_sdk_init,
+    mock_time_series_dataset,
+    mock_get_automl_forecasting_tft_training_job,
+    mock_run_automl_forecasting_tft_training_job,
+    mock_get_time_series_dataset,
+):
+    create_training_pipeline_forecasting_tft_sample.create_training_pipeline_forecasting_temporal_fusion_transformer_sample(
+        project=constants.PROJECT,
+        display_name=constants.DISPLAY_NAME,
+        dataset_id=constants.RESOURCE_ID,
+        model_display_name=constants.DISPLAY_NAME_2,
+        target_column=constants.TABULAR_TARGET_COLUMN,
+        training_fraction_split=constants.TRAINING_FRACTION_SPLIT,
+        validation_fraction_split=constants.VALIDATION_FRACTION_SPLIT,
+        test_fraction_split=constants.TEST_FRACTION_SPLIT,
+        budget_milli_node_hours=constants.BUDGET_MILLI_NODE_HOURS_8000,
+        timestamp_split_column_name=constants.TIMESTAMP_SPLIT_COLUMN_NAME,
+        weight_column=constants.WEIGHT_COLUMN,
+        time_series_attribute_columns=constants.TIME_SERIES_ATTRIBUTE_COLUMNS,
+        context_window=constants.CONTEXT_WINDOW,
+        export_evaluated_data_items=constants.EXPORT_EVALUATED_DATA_ITEMS,
+        export_evaluated_data_items_bigquery_destination_uri=constants.EXPORT_EVALUATED_DATA_ITEMS_BIGQUERY_DESTINATION_URI,
+        export_evaluated_data_items_override_destination=constants.EXPORT_EVALUATED_DATA_ITEMS_OVERRIDE_DESTINATION,
+        validation_options=constants.VALIDATION_OPTIONS,
+        predefined_split_column_name=constants.PREDEFINED_SPLIT_COLUMN_NAME,
+    )
+
+    mock_get_time_series_dataset.assert_called_once_with(constants.RESOURCE_ID)
+
+    mock_sdk_init.assert_called_once_with(
+        project=constants.PROJECT, location=constants.LOCATION
+    )
+    mock_get_automl_forecasting_tft_training_job.assert_called_once_with(
+        display_name=constants.DISPLAY_NAME,
+        optimization_objective="minimize-rmse",
+    )
+    mock_run_automl_forecasting_tft_training_job.assert_called_once_with(
+        dataset=mock_time_series_dataset,
+        target_column=constants.TABULAR_TARGET_COLUMN,
+        time_column=constants.FORECASTNG_TIME_COLUMN,
+        time_series_identifier_column=constants.FORECASTNG_TIME_SERIES_IDENTIFIER_COLUMN,
+        unavailable_at_forecast_columns=constants.FORECASTNG_UNAVAILABLE_AT_FORECAST_COLUMNS,
+        available_at_forecast_columns=constants.FORECASTNG_AVAILABLE_AT_FORECAST_COLUMNS,
+        forecast_horizon=constants.FORECASTNG_FORECAST_HORIZON,
+        data_granularity_unit=constants.DATA_GRANULARITY_UNIT,
+        data_granularity_count=constants.DATA_GRANULARITY_COUNT,
+        training_fraction_split=constants.TRAINING_FRACTION_SPLIT,
+        validation_fraction_split=constants.VALIDATION_FRACTION_SPLIT,
+        test_fraction_split=constants.TEST_FRACTION_SPLIT,
+        budget_milli_node_hours=constants.BUDGET_MILLI_NODE_HOURS_8000,
+        model_display_name=constants.DISPLAY_NAME_2,
+        timestamp_split_column_name=constants.TIMESTAMP_SPLIT_COLUMN_NAME,
+        weight_column=constants.WEIGHT_COLUMN,
+        time_series_attribute_columns=constants.TIME_SERIES_ATTRIBUTE_COLUMNS,
+        context_window=constants.CONTEXT_WINDOW,
+        export_evaluated_data_items=constants.EXPORT_EVALUATED_DATA_ITEMS,
+        export_evaluated_data_items_bigquery_destination_uri=constants.EXPORT_EVALUATED_DATA_ITEMS_BIGQUERY_DESTINATION_URI,
+        export_evaluated_data_items_override_destination=constants.EXPORT_EVALUATED_DATA_ITEMS_OVERRIDE_DESTINATION,
+        validation_options=constants.VALIDATION_OPTIONS,
+        predefined_split_column_name=constants.PREDEFINED_SPLIT_COLUMN_NAME,
+        sync=True,
+    )
