
Commit cc4b23d

chore: move LearningRateExp to deepmd.utils.learning_rate (#4219)
<!-- This is an auto-generated comment: release notes by coderabbit.ai -->

## Summary by CodeRabbit

- **New Features**
  - Introduced a new exponential decay learning rate scheduler to enhance training efficiency.
  - Added functionality to compute learning rates at specific training steps.
- **Bug Fixes**
  - Removed the outdated `LearningRateExp` class from the previous module to avoid confusion.

<!-- end of auto-generated comment: release notes by coderabbit.ai -->

---------

Signed-off-by: Jinzhe Zeng <[email protected]>
1 parent 1e1090a commit cc4b23d
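
For context, here is a minimal usage sketch of the relocated class, based only on the constructor and `value()` method shown in the diffs below; the parameter values are illustrative, not defaults:

```python
from deepmd.dpmodel.utils.learning_rate import LearningRateExp

# Exponential decay from start_lr toward stop_lr over stop_steps training steps.
# The numbers below are illustrative only.
lr = LearningRateExp(
    start_lr=1e-3,
    stop_lr=1e-8,
    decay_steps=5000,
    stop_steps=1_000_000,
)
print(lr.value(0))        # 1e-3 at the first step
print(lr.value(500_000))  # ~3.2e-6, roughly halfway between start_lr and stop_lr on a log scale
```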

File tree

2 files changed (+59, -51 lines changed):

- deepmd/dpmodel/utils/learning_rate.py
- deepmd/pt/utils/learning_rate.py

deepmd/dpmodel/utils/learning_rate.py

Lines changed: 53 additions & 0 deletions
@@ -0,0 +1,53 @@
+# SPDX-License-Identifier: LGPL-3.0-or-later
+import numpy as np
+
+
+class LearningRateExp:
+    def __init__(
+        self,
+        start_lr,
+        stop_lr,
+        decay_steps,
+        stop_steps,
+        decay_rate=None,
+        **kwargs,
+    ):
+        """
+        Construct an exponential-decayed learning rate.
+
+        Parameters
+        ----------
+        start_lr
+            The learning rate at the start of the training.
+        stop_lr
+            The desired learning rate at the end of the training.
+            When decay_rate is explicitly set, this value will serve as
+            the minimum learning rate during training. In other words,
+            if the learning rate decays below stop_lr, stop_lr will be applied instead.
+        decay_steps
+            The learning rate is decaying every this number of training steps.
+        stop_steps
+            The total training steps for learning rate scheduler.
+        decay_rate
+            The decay rate for the learning rate.
+            If provided, the decay rate will be set instead of
+            calculating it through interpolation between start_lr and stop_lr.
+        """
+        self.start_lr = start_lr
+        default_ds = 100 if stop_steps // 10 > 100 else stop_steps // 100 + 1
+        self.decay_steps = decay_steps
+        if self.decay_steps >= stop_steps:
+            self.decay_steps = default_ds
+        self.decay_rate = np.exp(
+            np.log(stop_lr / self.start_lr) / (stop_steps / self.decay_steps)
+        )
+        if decay_rate is not None:
+            self.decay_rate = decay_rate
+        self.min_lr = stop_lr
+
+    def value(self, step) -> np.float64:
+        """Get the learning rate at the given step."""
+        step_lr = self.start_lr * np.power(self.decay_rate, step // self.decay_steps)
+        if step_lr < self.min_lr:
+            step_lr = self.min_lr
+        return step_lr
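
When `decay_rate` is not given, it is interpolated so that the schedule reaches `stop_lr` after `stop_steps` steps (and `decay_steps` is replaced by a default if it is not smaller than `stop_steps`). A quick worked check of that formula, with illustrative numbers rather than defaults:

```python
import numpy as np

start_lr, stop_lr = 1.0e-3, 1.0e-8
decay_steps, stop_steps = 5_000, 100_000

# decay_rate is chosen so that start_lr * decay_rate**(stop_steps / decay_steps) == stop_lr
decay_rate = np.exp(np.log(stop_lr / start_lr) / (stop_steps / decay_steps))
print(decay_rate)  # ~0.562

# After all 20 decay intervals the rate lands back on stop_lr (up to floating-point error)
print(start_lr * decay_rate ** (stop_steps // decay_steps))  # ~1e-8
```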

deepmd/pt/utils/learning_rate.py

Lines changed: 6 additions & 51 deletions
@@ -1,53 +1,8 @@
 # SPDX-License-Identifier: LGPL-3.0-or-later
-import numpy as np
+from deepmd.dpmodel.utils.learning_rate import (
+    LearningRateExp,
+)
 
-
-class LearningRateExp:
-    def __init__(
-        self,
-        start_lr,
-        stop_lr,
-        decay_steps,
-        stop_steps,
-        decay_rate=None,
-        **kwargs,
-    ):
-        """
-        Construct an exponential-decayed learning rate.
-
-        Parameters
-        ----------
-        start_lr
-            The learning rate at the start of the training.
-        stop_lr
-            The desired learning rate at the end of the training.
-            When decay_rate is explicitly set, this value will serve as
-            the minimum learning rate during training. In other words,
-            if the learning rate decays below stop_lr, stop_lr will be applied instead.
-        decay_steps
-            The learning rate is decaying every this number of training steps.
-        stop_steps
-            The total training steps for learning rate scheduler.
-        decay_rate
-            The decay rate for the learning rate.
-            If provided, the decay rate will be set instead of
-            calculating it through interpolation between start_lr and stop_lr.
-        """
-        self.start_lr = start_lr
-        default_ds = 100 if stop_steps // 10 > 100 else stop_steps // 100 + 1
-        self.decay_steps = decay_steps
-        if self.decay_steps >= stop_steps:
-            self.decay_steps = default_ds
-        self.decay_rate = np.exp(
-            np.log(stop_lr / self.start_lr) / (stop_steps / self.decay_steps)
-        )
-        if decay_rate is not None:
-            self.decay_rate = decay_rate
-        self.min_lr = stop_lr
-
-    def value(self, step):
-        """Get the learning rate at the given step."""
-        step_lr = self.start_lr * np.power(self.decay_rate, step // self.decay_steps)
-        if step_lr < self.min_lr:
-            step_lr = self.min_lr
-        return step_lr
+__all__ = [
+    "LearningRateExp",
+]
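
Because the PyTorch module now simply re-exports the class, existing imports from the old path should keep working; a minimal sketch of what this change implies:

```python
# Both paths should resolve to the same class object after this change,
# since deepmd.pt.utils.learning_rate re-exports LearningRateExp.
from deepmd.dpmodel.utils.learning_rate import LearningRateExp as NewLocation
from deepmd.pt.utils.learning_rate import LearningRateExp as OldLocation

assert NewLocation is OldLocation
```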
