Browse Source

fix optimizer.decay_weight bug

tags/v0.2.0-alpha
leilei_snow 5 years ago
parent
commit
c4d0bb266a
1 changed file with 1 addition and 1 deletion
  1. +1
    -1
      mindspore/nn/optim/optimizer.py

+ 1
- 1
mindspore/nn/optim/optimizer.py View File

@@ -109,7 +109,7 @@ class Optimizer(Cell):
tuple[Tensor], The gradients after weight decay.
"""
if self.weight_decay > 0:
params = self.params
params = self.parameters
gradients = self.hyper_map(F.partial(apply_decay, self.weight_decay), self.decay_flags, params, gradients)

return gradients


Loading…
Cancel
Save