
!8290 modify example

From: @lijiaqi0612
Reviewed-by: @liangchenghui,@kingxian
Signed-off-by: @liangchenghui
tags/v1.1.0
Committed by mindspore-ci-bot
commit a8478839c9
5 changed files with 8 additions and 8 deletions:
  1. mindspore/nn/optim/adam.py (+1, -1)
  2. mindspore/nn/optim/ftrl.py (+1, -1)
  3. mindspore/nn/optim/lazyadam.py (+1, -1)
  4. mindspore/nn/optim/proximal_ada_grad.py (+1, -1)
  5. mindspore/nn/wrap/loss_scale.py (+4, -4)

mindspore/nn/optim/adam.py (+1, -1)

@@ -337,7 +337,7 @@ class Adam(Optimizer):
         """If the input value is set to "CPU", the parameters will be updated on the host using the Fused
            optimizer operation."""
         if not isinstance(value, str):
-            raise ValueError("The value must be str type, but got value type is {}".format(type(value)))
+            raise TypeError("The value must be str type, but got value type is {}".format(type(value)))

         if value not in ('CPU', 'Ascend'):
             raise ValueError("The value must be 'CPU' or 'Ascend', but got value {}".format(value))


mindspore/nn/optim/ftrl.py (+1, -1)

@@ -190,7 +190,7 @@ class FTRL(Optimizer):
         """If the input value is set to "CPU", the parameters will be updated on the host using the Fused
            optimizer operation."""
         if not isinstance(value, str):
-            raise ValueError("The value must be str type, but got value type is {}".format(type(value)))
+            raise TypeError("The value must be str type, but got value type is {}".format(type(value)))

         if value not in ('CPU', 'Ascend'):
             raise ValueError("The value must be 'CPU' or 'Ascend', but got value {}".format(value))


mindspore/nn/optim/lazyadam.py (+1, -1)

@@ -255,7 +255,7 @@ class LazyAdam(Optimizer):
         """If the input value is set to "CPU", the parameters will be updated on the host using the Fused
            optimizer operation."""
         if not isinstance(value, str):
-            raise ValueError("The value must be str type, but got value type is {}".format(type(value)))
+            raise TypeError("The value must be str type, but got value type is {}".format(type(value)))

         if value not in ('CPU', 'Ascend'):
             raise ValueError("The value must be 'CPU' or 'Ascend', but got value {}".format(value))


mindspore/nn/optim/proximal_ada_grad.py (+1, -1)

@@ -159,7 +159,7 @@ class ProximalAdagrad(Optimizer):
         """If the input value is set to "CPU", the parameters will be updated on the host using the Fused
            optimizer operation."""
         if not isinstance(value, str):
-            raise ValueError("The value must be str type, but got value type is {}".format(type(value)))
+            raise TypeError("The value must be str type, but got value type is {}".format(type(value)))

         if value not in ('CPU', 'Ascend'):
             raise ValueError("The value must be 'CPU' or 'Ascend', but got value {}".format(value))


mindspore/nn/wrap/loss_scale.py (+4, -4)

@@ -79,13 +79,13 @@ class DynamicLossScaleUpdateCell(Cell):
         >>> net_with_loss = Net()
         >>> optimizer = nn.Momentum(net_with_loss.trainable_params(), learning_rate=0.1, momentum=0.9)
         >>> manager = nn.DynamicLossScaleUpdateCell(loss_scale_value=2**12, scale_factor=2, scale_window=1000)
-        >>> train_network = nn.TrainOneStepWithLossScaleCell(net_with_loss, optimizer, scale_update_cell=manager)
+        >>> train_network = nn.TrainOneStepWithLossScaleCell(net_with_loss, optimizer, scale_sense=manager)
         >>> train_network.set_train()
         >>>
         >>> inputs = Tensor(np.ones([16, 16]).astype(np.float32))
         >>> label = Tensor(np.zeros([16, 16]).astype(np.float32))
         >>> scaling_sens = Tensor(np.full((1), np.finfo(np.float32).max), dtype=mindspore.float32)
-        >>> output = train_network(inputs, label, scaling_sens)
+        >>> output = train_network(inputs, label, scale_sense=scaling_sens)
     """

     def __init__(self,
@@ -145,13 +145,13 @@ class FixedLossScaleUpdateCell(Cell):
         >>> net_with_loss = Net()
         >>> optimizer = nn.Momentum(net_with_loss.trainable_params(), learning_rate=0.1, momentum=0.9)
         >>> manager = nn.FixedLossScaleUpdateCell(loss_scale_value=2**12)
-        >>> train_network = nn.TrainOneStepWithLossScaleCell(net_with_loss, optimizer, scale_update_cell=manager)
+        >>> train_network = nn.TrainOneStepWithLossScaleCell(net_with_loss, optimizer, scale_sense=manager)
         >>> train_network.set_train()
         >>>
        >>> inputs = Tensor(np.ones([16, 16]).astype(np.float32))
         >>> label = Tensor(np.zeros([16, 16]).astype(np.float32))
         >>> scaling_sens = Tensor(np.full((1), np.finfo(np.float32).max), dtype=mindspore.float32)
-        >>> output = train_network(inputs, label, scaling_sens)
+        >>> output = train_network(inputs, label, scale_sense=scaling_sens)
     """

     def __init__(self, loss_scale_value):
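Putting the updated docstrings together, a self-contained sketch of the new usage, assuming the MindSpore 1.1 API (the toy Net with an MSELoss is illustrative, not part of the diff):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    class Net(nn.Cell):
        def __init__(self):
            super(Net, self).__init__()
            self.dense = nn.Dense(16, 16)
            self.loss = nn.MSELoss()

        def construct(self, data, label):
            # Return a scalar loss so the wrapper can scale and unscale it.
            return self.loss(self.dense(data), label)

    net_with_loss = Net()
    optimizer = nn.Momentum(net_with_loss.trainable_params(), learning_rate=0.1, momentum=0.9)
    manager = nn.DynamicLossScaleUpdateCell(loss_scale_value=2**12, scale_factor=2, scale_window=1000)
    # scale_sense replaces the former scale_update_cell keyword:
    train_network = nn.TrainOneStepWithLossScaleCell(net_with_loss, optimizer, scale_sense=manager)
    train_network.set_train()

    inputs = Tensor(np.ones([16, 16]).astype(np.float32))
    label = Tensor(np.zeros([16, 16]).astype(np.float32))
    output = train_network(inputs, label)

Passing a DynamicLossScaleUpdateCell (or FixedLossScaleUpdateCell) as scale_sense lets the wrapper both read the current loss scale and update it after overflow checks; a scalar Tensor can be passed instead when the scale is fixed.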

