Skip to content

Commit 88188d2

Browse files
Ark-kun authored and copybara-github committed
feat: GenAI - Tuning - Supervised - Added support for the adapter_size parameter
PiperOrigin-RevId: 631251312
1 parent 20b1866 commit 88188d2

File tree

1 file changed

+4
-1
lines changed

1 file changed

+4
-1
lines changed

vertexai/tuning/_supervised_tuning.py

+4-1
Original file line number | Diff line number | Diff line change
@@ -13,7 +13,7 @@
1313
# limitations under the License.
1414
#
1515

16-
from typing import Optional, Union
16+
from typing import Literal, Optional, Union
1717

1818
from google.cloud.aiplatform_v1.types import tuning_job as gca_tuning_job_types
1919

@@ -29,6 +29,7 @@ def train(
2929
tuned_model_display_name: Optional[str] = None,
3030
epochs: Optional[int] = None,
3131
learning_rate_multiplier: Optional[float] = None,
32+
adapter_size: Optional[Literal[1, 4, 8, 16]] = None,
3233
) -> "SupervisedTuningJob":
3334
"""Tunes a model using supervised training.
3435
@@ -44,6 +45,7 @@ def train(
4445
be up to 128 characters long and can consist of any UTF-8 characters.
4546
epochs: Number of training epochs for this tuning job.
4647
learning_rate_multiplier: Learning rate multiplier for tuning.
48+
adapter_size: Adapter size for tuning.
4749
4850
Returns:
4951
A `TuningJob` object.
@@ -54,6 +56,7 @@ def train(
5456
hyper_parameters=gca_tuning_job_types.SupervisedHyperParameters(
5557
epoch_count=epochs,
5658
learning_rate_multiplier=learning_rate_multiplier,
59+
adapter_size=adapter_size,
5760
),
5861
)
5962

0 commit comments

Comments
 (0)