
mod_comments

Commit dad47cf2dd (tags/v1.1.0)
wanyiming committed 5 years ago
11 changed files with 26 additions and 32 deletions
1. mindspore/nn/probability/README.md (+2, -2)
2. mindspore/nn/probability/transforms/transform_bnn.py (+3, -3)
3. mindspore/train/model.py (+3, -3)
4. model_zoo/official/cv/googlenet/README.md (+4, -6)
5. model_zoo/official/nlp/bert_thor/src/model_thor.py (+4, -4)
6. model_zoo/official/recommend/ncf/README.md (+2, -3)
7. model_zoo/official/recommend/ncf/src/ncf.py (+1, -1)
8. model_zoo/research/cv/ghostnet/eval.py (+1, -2)
9. model_zoo/research/cv/ghostnet_quant/eval.py (+1, -2)
10. model_zoo/research/cv/resnet50_adv_pruning/eval.py (+1, -2)
11. tests/st/networks/models/resnet50/src_thor/model_thor.py (+4, -4)
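The same one-line edit is applied at every call site below: the deprecated `is_grad` keyword is dropped from `SoftmaxCrossEntropyWithLogits`, and the remaining arguments are left untouched. A minimal before/after sketch of the pattern (illustrative, not copied from any single file in this commit):

```python
import mindspore.nn as nn

# Before: the old call passed the deprecated keyword explicitly.
# loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction='mean')

# After: drop `is_grad`; `sparse` and `reduction` are passed as before.
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
```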

mindspore/nn/probability/README.md (+2, -2)

@@ -100,7 +100,7 @@ The loss function `SoftmaxCrossEntropyWithLogits` and the optimizer `AdamWeightD
if __name__ == "__main__":
...
# define the loss function
-criterion = SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
+criterion = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
optimizer = AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001)
...
```
@@ -320,7 +320,7 @@ from mindspore.nn import WithLossCell, TrainOneStepCell
if __name__ == "__main__":
network = LeNet5()

-criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
+criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
optimizer = nn.AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001)

net_with_loss = WithLossCell(network, criterion)


mindspore/nn/probability/transforms/transform_bnn.py (+3, -3)

@@ -53,7 +53,7 @@ class TransformToBNN:
>>> return out
>>>
>>> net = Net()
->>> criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> net_with_loss = WithLossCell(network, criterion)
>>> train_network = TrainOneStepCell(net_with_loss, optim)
@@ -107,7 +107,7 @@ class TransformToBNN:

Examples:
>>> net = Net()
->>> criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> net_with_loss = WithLossCell(network, criterion)
>>> train_network = TrainOneStepCell(net_with_loss, optim)
@@ -149,7 +149,7 @@ class TransformToBNN:

Examples:
>>> net = Net()
->>> criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> net_with_loss = WithLossCell(network, criterion)
>>> train_network = TrainOneStepCell(net_with_loss, optim)


mindspore/train/model.py (+3, -3)

@@ -105,7 +105,7 @@ class Model:
>>> return out
>>>
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
>>> dataset = get_dataset()
@@ -543,7 +543,7 @@ class Model:
Examples:
>>> dataset = get_dataset()
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> loss_scale_manager = FixedLossScaleManager()
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None, loss_scale_manager=loss_scale_manager)
@@ -661,7 +661,7 @@ class Model:
Examples:
>>> dataset = get_dataset()
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'})
>>> model.eval(dataset)
"""


model_zoo/official/cv/googlenet/README.md (+4, -6)

@@ -392,8 +392,7 @@ If you need to use the trained model to perform inference on multiple hardware p
net = GoogleNet(num_classes=cfg.num_classes)
opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01,
cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean',
-is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'})
# Load pre-trained model
@@ -419,8 +418,7 @@ If you need to use the trained model to perform inference on multiple hardware p
net = GoogleNet(num_classes=cfg.num_classes)
opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01,
cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean',
-is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'})
# Load pre-trained model
@@ -453,7 +451,7 @@ If you need to use the trained model to perform inference on multiple hardware p
steps_per_epoch=batch_num)
opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()),
Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'},
amp_level="O2", keep_batchnorm_fp32=False, loss_scale_manager=None)
@@ -487,7 +485,7 @@ If you need to use the trained model to perform inference on multiple hardware p
steps_per_epoch=batch_num)
opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()),
Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'},
amp_level="O2", keep_batchnorm_fp32=False, loss_scale_manager=None)


model_zoo/official/nlp/bert_thor/src/model_thor.py (+4, -4)

@@ -135,7 +135,7 @@ class Model:
>>> return out
>>>
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
>>> dataset = get_dataset()
@@ -307,7 +307,7 @@ class Model:
>>> train_dataset = get_train_dataset()
>>> valid_dataset = get_valid_dataset()
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> model = Model(net, loss_fn=loss, optimizer=optim, metrics={'acc'})
>>> model.init(train_dataset, valid_dataset)
@@ -597,7 +597,7 @@ class Model:
Examples:
>>> dataset = get_dataset()
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> loss_scale_manager = FixedLossScaleManager()
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None, loss_scale_manager=loss_scale_manager)
@@ -714,7 +714,7 @@ class Model:
Examples:
>>> dataset = get_dataset()
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'})
>>> model.eval(dataset)
"""


model_zoo/official/recommend/ncf/README.md (+2, -3)

@@ -243,8 +243,7 @@ https://www.mindspore.cn/tutorial/zh-CN/master/use/multi_platform_inference.html
net = GoogleNet(num_classes=cfg.num_classes)
opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01,
cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean',
-is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'})
# Load pre-trained model
@@ -275,7 +274,7 @@ https://www.mindspore.cn/tutorial/zh-CN/master/use/multi_platform_inference.html
steps_per_epoch=batch_num)
opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()),
Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'},
amp_level="O2", keep_batchnorm_fp32=False, loss_scale_manager=None)


model_zoo/official/recommend/ncf/src/ncf.py (+1, -1)

@@ -199,7 +199,7 @@ class NetWithLossClass(nn.Cell):
"""
def __init__(self, network):
super(NetWithLossClass, self).__init__(auto_prefix=False)
-#self.loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+#self.loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
self.loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
self.network = network
self.reducesum = P.ReduceSum(keep_dims=False)


model_zoo/research/cv/ghostnet/eval.py (+1, -2)

@@ -50,8 +50,7 @@ if __name__ == '__main__':
else:
raise ValueError("Unsupport platform.")

-loss = nn.SoftmaxCrossEntropyWithLogits(
-is_grad=False, sparse=True, reduction='mean')
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')

if args_opt.model == 'ghostnet':
net = ghostnet_1x(num_classes=config_platform.num_classes)


model_zoo/research/cv/ghostnet_quant/eval.py (+1, -2)

@@ -49,8 +49,7 @@ if __name__ == '__main__':
else:
raise ValueError("Unsupport platform.")

-loss = nn.SoftmaxCrossEntropyWithLogits(
-is_grad=False, sparse=True, reduction='mean')
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')

net = ghostnet_1x(num_classes=config_platform.num_classes)



model_zoo/research/cv/resnet50_adv_pruning/eval.py (+1, -2)

@@ -53,8 +53,7 @@ if __name__ == '__main__':
else:
raise ValueError("Unsupport platform.")

-loss = nn.SoftmaxCrossEntropyWithLogits(
-is_grad=False, sparse=True, reduction='mean')
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')

if args_opt.platform == "Ascend":
net.to_float(mstype.float16)


tests/st/networks/models/resnet50/src_thor/model_thor.py (+4, -4)

@@ -128,7 +128,7 @@ class Model:
>>> return out
>>>
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
>>> dataset = get_dataset()
@@ -295,7 +295,7 @@ class Model:
>>> train_dataset = get_train_dataset()
>>> valid_dataset = get_valid_dataset()
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> model = Model(net, loss_fn=loss, optimizer=optim, metrics={'acc'})
>>> model.init(train_dataset, valid_dataset)
@@ -566,7 +566,7 @@ class Model:
Examples:
>>> dataset = get_dataset()
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> loss_scale_manager = FixedLossScaleManager()
>>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None, loss_scale_manager=loss_scale_manager)
@@ -678,7 +678,7 @@ class Model:
Examples:
>>> dataset = get_dataset()
>>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
>>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'})
>>> model.eval(dataset)
"""

