From 67ea2b2065abb7b968bcd982e4dc1b7a9827729b Mon Sep 17 00:00:00 2001
From: bjutsecurity22
Date: Mon, 4 Sep 2023 18:12:17 +0800
Subject: [PATCH] Update dynamic_lr.py

---
 mindspore/nn/dynamic_lr.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/mindspore/nn/dynamic_lr.py b/mindspore/nn/dynamic_lr.py
index 4050323722..0bd4d2bfd1
--- a/mindspore/nn/dynamic_lr.py
+++ b/mindspore/nn/dynamic_lr.py
@@ -110,6 +110,7 @@ def _check_inputs(learning_rate, decay_rate, total_step, step_per_epoch, decay_e
 
 
 def exponential_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, is_stair=False):
+    # Calculate the learning rate based on the exponential decay function.
     r"""
     Calculates learning rate base on exponential decay function. The learning rate for each step
     will be stored in a list.
@@ -173,6 +174,7 @@ def exponential_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch,
 
 
 def natural_exp_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, is_stair=False):
+    # Calculate the learning rate based on the natural exponential decay function.
     r"""
     Calculates learning rate base on natural exponential decay function. The learning rate for each step
     will be stored in a list.
@@ -235,6 +237,7 @@ def natural_exp_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch,
     return lr
 
 def inverse_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, is_stair=False):
+    # Calculate the learning rate based on the inverse-time decay function.
     r"""
     Calculates learning rate base on inverse-time decay function. The learning rate for each step
     will be stored in a list.
@@ -307,6 +310,7 @@ def _lr_calc(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch,
 
 
 def cosine_decay_lr(min_lr, max_lr, total_step, step_per_epoch, decay_epoch):
+    # Calculate the learning rate based on the cosine decay function.
     r"""
     Calculates learning rate base on cosine decay function. The learning rate for each step
     will be stored in a list.
@@ -385,6 +389,7 @@ def cosine_decay_lr(min_lr, max_lr, total_step, step_per_epoch, decay_epoch):
 
 
 def polynomial_decay_lr(learning_rate, end_learning_rate, total_step, step_per_epoch, decay_epoch, power,
                         update_decay_epoch=False):
+    # Calculate the learning rate based on the polynomial decay function.
     r"""
     Calculates learning rate base on polynomial decay function. The learning rate for each step
     will be stored in a list.
@@ -487,6 +492,7 @@ def polynomial_decay_lr(learning_rate, end_learning_rate, total_step, step_per_e
 
 
 def warmup_lr(learning_rate, total_step, step_per_epoch, warmup_epoch):
+    # Learning rate warm-up method.
     r"""
     Gets learning rate warming up. The learning rate for each step will be stored in a list.