From c4d0bb266a59f13ecfd680b3d839fac3dcede50f Mon Sep 17 00:00:00 2001
From: leilei_snow
Date: Wed, 15 Apr 2020 10:00:27 +0000
Subject: [PATCH] fix optimizer.decay_weight bug

---
 mindspore/nn/optim/optimizer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mindspore/nn/optim/optimizer.py b/mindspore/nn/optim/optimizer.py
index c2a419c565..bfbde78fff 100755
--- a/mindspore/nn/optim/optimizer.py
+++ b/mindspore/nn/optim/optimizer.py
@@ -109,7 +109,7 @@ class Optimizer(Cell):
             tuple[Tensor], The gradients after weight decay.
         """
         if self.weight_decay > 0:
-            params = self.params
+            params = self.parameters
             gradients = self.hyper_map(F.partial(apply_decay, self.weight_decay), self.decay_flags,
                                        params, gradients)
         return gradients
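
Note on the fix: the Optimizer base class stores its parameter list as self.parameters, so the old self.params presumably failed (likely with an AttributeError) whenever weight_decay > 0. The hyper_map call walks decay_flags, params, and gradients in lockstep and applies apply_decay to each triple. Below is a minimal plain-Python sketch of that decay step, assuming apply_decay implements standard L2 weight decay (grad + weight_decay * param for flagged parameters); the function name and scalar values are illustrative, not MindSpore's actual graph-mode machinery.

def apply_decay_sketch(weight_decay, decay_flags, params, gradients):
    # Add weight_decay * param to each gradient whose decay flag is set;
    # gradients for unflagged parameters (e.g. biases) pass through unchanged.
    return tuple(
        grad + weight_decay * param if flag else grad
        for flag, param, grad in zip(decay_flags, params, gradients)
    )

# Usage with plain floats standing in for tensors (hypothetical values):
grads = apply_decay_sketch(0.01, (True, False), (2.0, 3.0), (0.5, 0.5))
# grads == (0.52, 0.5): only the first, flagged parameter is decayed.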