
Update dynamic_lr.py

pull/1/head
bjutsecurity22 2 years ago
commit 67ea2b2065
1 changed file with 6 additions and 0 deletions
mindspore/nn/dynamic_lr.py

@@ -110,6 +110,7 @@ def _check_inputs(learning_rate, decay_rate, total_step, step_per_epoch, decay_e


def exponential_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, is_stair=False):
# Calculates the learning rate based on the exponential decay function.
r"""
Calculates the learning rate based on the exponential decay function. The learning rate for each step
will be stored in a list.
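A quick usage sketch (numbers are illustrative, outputs rounded for readability). It assumes the function is importable from mindspore.nn and follows the documented decay rule learning_rate * decay_rate ** (current_epoch / decay_epoch), with current_epoch = floor(i / step_per_epoch):

from mindspore.nn import exponential_decay_lr

# 6 steps at 2 steps per epoch gives epochs 0, 0, 1, 1, 2, 2;
# with decay_epoch=1 the rate is multiplied by 0.9 once per epoch.
lr = exponential_decay_lr(0.1, 0.9, total_step=6, step_per_epoch=2, decay_epoch=1)
print(lr)  # ~[0.1, 0.1, 0.09, 0.09, 0.081, 0.081]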
@@ -173,6 +174,7 @@ def exponential_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch,


def natural_exp_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, is_stair=False):
# Calculates the learning rate based on the natural exponential decay function.
r"""
Calculates the learning rate based on the natural exponential decay function. The learning rate for
each step will be stored in a list.
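A similar sketch for the natural-exponential variant (illustrative numbers, rounded outputs), assuming the rule learning_rate * e ** (-decay_rate * current_epoch); with is_stair=True the exponent only advances every decay_epoch epochs:

from mindspore.nn import natural_exp_decay_lr

# epochs 0, 0, 1, 1, 2, 2; staired with decay_epoch=2, so the decay
# applies only once epoch 2 is reached: 0.1 * e**(-0.9 * 2) ~ 0.0165
lr = natural_exp_decay_lr(0.1, 0.9, total_step=6, step_per_epoch=2, decay_epoch=2, is_stair=True)
print(lr)  # ~[0.1, 0.1, 0.1, 0.1, 0.0165, 0.0165]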
@@ -235,6 +237,7 @@ def natural_exp_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch,
return lr

def inverse_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, is_stair=False):
# Calculates the learning rate based on the inverse-time decay function.
r"""
Calculates the learning rate based on the inverse-time decay function. The learning rate for each step
will be stored in a list.
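A sketch for the inverse-time variant (illustrative numbers, rounded outputs), assuming the rule learning_rate / (1 + decay_rate * current_epoch / decay_epoch):

from mindspore.nn import inverse_decay_lr

# one step per epoch, so the divisor grows by 0.5 each step:
# 0.1/1.0, 0.1/1.5, 0.1/2.0, 0.1/2.5, 0.1/3.0, 0.1/3.5
lr = inverse_decay_lr(0.1, 0.5, total_step=6, step_per_epoch=1, decay_epoch=1, is_stair=True)
print(lr)  # ~[0.1, 0.0667, 0.05, 0.04, 0.0333, 0.0286]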
@@ -307,6 +310,7 @@ def _lr_calc(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch,


def cosine_decay_lr(min_lr, max_lr, total_step, step_per_epoch, decay_epoch):
# Calculates the learning rate based on the cosine decay function.
r"""
Calculates the learning rate based on the cosine decay function. The learning rate for each step will be stored in a list.
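A sketch for cosine decay (illustrative numbers), assuming the rule min_lr + 0.5 * (max_lr - min_lr) * (1 + cos(pi * current_epoch / decay_epoch)); note this signature takes min_lr and max_lr rather than a decay rate:

from mindspore.nn import cosine_decay_lr

# epochs 0, 0, 1, 1, 2, 2 sweep the cosine from max_lr down to min_lr
lr = cosine_decay_lr(0.01, 0.1, total_step=6, step_per_epoch=2, decay_epoch=2)
print(lr)  # ~[0.1, 0.1, 0.055, 0.055, 0.01, 0.01]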

@@ -385,6 +389,7 @@ def cosine_decay_lr(min_lr, max_lr, total_step, step_per_epoch, decay_epoch):

def polynomial_decay_lr(learning_rate, end_learning_rate, total_step, step_per_epoch, decay_epoch, power,
update_decay_epoch=False):
# Calculates the learning rate based on the polynomial decay function.
r"""
Calculates the learning rate based on the polynomial decay function. The learning rate for each step
will be stored in a list.
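A sketch for polynomial decay (illustrative numbers, rounded outputs), assuming the rule (learning_rate - end_learning_rate) * (1 - min(current_epoch, decay_epoch) / decay_epoch) ** power + end_learning_rate:

from mindspore.nn import polynomial_decay_lr

# with power=0.5 the middle epoch sits at 0.09 * sqrt(0.5) + 0.01 ~ 0.0736
lr = polynomial_decay_lr(0.1, 0.01, total_step=6, step_per_epoch=2, decay_epoch=2, power=0.5)
print(lr)  # ~[0.1, 0.1, 0.0736, 0.0736, 0.01, 0.01]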
@@ -487,6 +492,7 @@ def polynomial_decay_lr(learning_rate, end_learning_rate, total_step, step_per_e


def warmup_lr(learning_rate, total_step, step_per_epoch, warmup_epoch):
# Learning rate warm-up method.
r"""
Generates a warm-up learning rate. The learning rate for each step will be stored in a list.
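A sketch for the warm-up schedule (illustrative numbers), assuming the rule learning_rate * min(current_epoch, warmup_epoch) / warmup_epoch, i.e. a linear per-epoch ramp from 0 up to learning_rate:

from mindspore.nn import warmup_lr

# ramps over warmup_epoch=2 epochs: 0, then 0.05, then the full 0.1
lr = warmup_lr(0.1, total_step=6, step_per_epoch=2, warmup_epoch=2)
print(lr)  # -> [0.0, 0.0, 0.05, 0.05, 0.1, 0.1]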


