run_mantaray_jobs.py

# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""DAGs to run Mantaray benchmarks."""
import datetime
import re

from airflow import models
import yaml

from dags import composer_env
from xlml.utils import mantaray

# Skip running this script in unit tests because GCS loading will fail.
if composer_env.is_prod_env() or composer_env.is_dev_env():
  # Download xlml_jobs.yaml from the borgcron GCS bucket, which
  # is pulled nightly from google3.
  xlml_jobs_yaml = mantaray.load_file_from_gcs(
      f"{mantaray.MANTARAY_G3_GS_BUCKET}/xlml_jobs/xlml_jobs.yaml"
  )
  xlml_jobs = yaml.safe_load(xlml_jobs_yaml)
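  # Each job entry is expected to provide at least "task_name", "file_name",
  # and "schedule" (the keys consumed below).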

  # Create two DAGs for the PyTorch/XLA tests.
  pattern = r"^(ptxla|pytorchxla_part1).*"
  pattern2 = r"^(pytorchxla_part2).*"
  workload_file_name_list = []
  workload_file_name_list_2 = []
  for job in xlml_jobs:
    if re.match(pattern, job["task_name"]):
      workload_file_name_list.append(job["file_name"])
    elif re.match(pattern2, job["task_name"]):
      workload_file_name_list_2.append(job["file_name"])
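  # Jobs matching neither pattern get their own standalone DAGs further below.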

  # Merge all PyTorch/XLA tests into one DAG.
  with models.DAG(
      dag_id="pytorch_xla_model_regression_test_on_trillium",
      schedule="0 0 * * *",  # every day at midnight
      tags=["mantaray", "pytorchxla", "xlml"],
      start_date=datetime.datetime(2024, 4, 22),
      catchup=False,
  ) as dag:
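    # Each task is named after its workload file, minus the file extension.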
    for workload_file_name in workload_file_name_list:
      run_workload = mantaray.run_workload.override(
          task_id=workload_file_name.split(".")[0]
      )(
          workload_file_name=workload_file_name,
      )
      run_workload

  # Split out the sd2 model test.
  with models.DAG(
      dag_id="pytorch_xla_model_regression_test_on_trillium_share_zone_2",
      schedule="0 0 * * *",  # every day at midnight, rather than job["schedule"]
      tags=["mantaray", "pytorchxla", "xlml"],
      start_date=datetime.datetime(2024, 4, 22),
      catchup=False,
  ) as dag:
    for workload_file_name in workload_file_name_list_2:
      run_workload = mantaray.run_workload.override(
          task_id=workload_file_name.split(".")[0]
      )(
          workload_file_name=workload_file_name,
      )
      run_workload

  # Create a separate DAG for each remaining job (from MaxText).
  for job in xlml_jobs:
    if not re.match(pattern, job["task_name"]) and not re.match(
        pattern2, job["task_name"]
    ):
      with models.DAG(
          dag_id=job["task_name"],
          schedule=job["schedule"],
          tags=["mantaray"],
          start_date=datetime.datetime(2024, 4, 22),
          catchup=False,
      ) as dag:
        run_workload = mantaray.run_workload(
            workload_file_name=job["file_name"],
        )
        run_workload
else:
  print(
      "Skipping Mantaray DAG creation since this is not a Prod or Dev"
      " Composer environment."
  )