From dad47cf2dd01a3d083b9262fb9549c54d9f58487 Mon Sep 17 00:00:00 2001 From: wanyiming Date: Thu, 19 Nov 2020 16:03:40 +0800 Subject: [PATCH] mod_comments --- mindspore/nn/probability/README.md | 4 ++-- mindspore/nn/probability/transforms/transform_bnn.py | 6 +++--- mindspore/train/model.py | 6 +++--- model_zoo/official/cv/googlenet/README.md | 10 ++++------ model_zoo/official/nlp/bert_thor/src/model_thor.py | 8 ++++---- model_zoo/official/recommend/ncf/README.md | 5 ++--- model_zoo/official/recommend/ncf/src/ncf.py | 2 +- model_zoo/research/cv/ghostnet/eval.py | 3 +-- model_zoo/research/cv/ghostnet_quant/eval.py | 3 +-- model_zoo/research/cv/resnet50_adv_pruning/eval.py | 3 +-- .../st/networks/models/resnet50/src_thor/model_thor.py | 8 ++++---- 11 files changed, 26 insertions(+), 32 deletions(-) diff --git a/mindspore/nn/probability/README.md b/mindspore/nn/probability/README.md index 68a5e42f2b..7e26e4a0fb 100644 --- a/mindspore/nn/probability/README.md +++ b/mindspore/nn/probability/README.md @@ -100,7 +100,7 @@ The loss function `SoftmaxCrossEntropyWithLogits` and the optimizer `AdamWeightD if __name__ == "__main__": ... # define the loss function - criterion = SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean") + criterion = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") optimizer = AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001) ... ``` @@ -320,7 +320,7 @@ from mindspore.nn import WithLossCell, TrainOneStepCell if __name__ == "__main__": network = LeNet5() - criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean") + criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean") optimizer = nn.AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001) net_with_loss = WithLossCell(network, criterion) diff --git a/mindspore/nn/probability/transforms/transform_bnn.py b/mindspore/nn/probability/transforms/transform_bnn.py index cb6cf0b48b..67c7659174 100644 --- a/mindspore/nn/probability/transforms/transform_bnn.py +++ b/mindspore/nn/probability/transforms/transform_bnn.py @@ -53,7 +53,7 @@ class TransformToBNN: >>> return out >>> >>> net = Net() - >>> criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> net_with_loss = WithLossCell(network, criterion) >>> train_network = TrainOneStepCell(net_with_loss, optim) @@ -107,7 +107,7 @@ class TransformToBNN: Examples: >>> net = Net() - >>> criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> net_with_loss = WithLossCell(network, criterion) >>> train_network = TrainOneStepCell(net_with_loss, optim) @@ -149,7 +149,7 @@ class TransformToBNN: Examples: >>> net = Net() - >>> criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> net_with_loss = WithLossCell(network, criterion) >>> train_network = TrainOneStepCell(net_with_loss, optim) diff --git a/mindspore/train/model.py b/mindspore/train/model.py index 63f3b73ddc..ffe9fb64d5 100755 --- a/mindspore/train/model.py +++ b/mindspore/train/model.py @@ -105,7 +105,7 @@ class 
Model: >>> return out >>> >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None) >>> dataset = get_dataset() @@ -543,7 +543,7 @@ class Model: Examples: >>> dataset = get_dataset() >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> loss_scale_manager = FixedLossScaleManager() >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None, loss_scale_manager=loss_scale_manager) @@ -661,7 +661,7 @@ class Model: Examples: >>> dataset = get_dataset() >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'}) >>> model.eval(dataset) """ diff --git a/model_zoo/official/cv/googlenet/README.md b/model_zoo/official/cv/googlenet/README.md index 8c24152c3e..0682c5eab8 100644 --- a/model_zoo/official/cv/googlenet/README.md +++ b/model_zoo/official/cv/googlenet/README.md @@ -392,8 +392,7 @@ If you need to use the trained model to perform inference on multiple hardware p net = GoogleNet(num_classes=cfg.num_classes) opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, cfg.momentum, weight_decay=cfg.weight_decay) - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', - is_grad=False) + loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'}) # Load pre-trained model @@ -419,8 +418,7 @@ If you need to use the trained model to perform inference on multiple hardware p net = GoogleNet(num_classes=cfg.num_classes) opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, cfg.momentum, weight_decay=cfg.weight_decay) - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', - is_grad=False) + loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'}) # Load pre-trained model @@ -453,7 +451,7 @@ If you need to use the trained model to perform inference on multiple hardware p steps_per_epoch=batch_num) opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay) - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False) + loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'}, amp_level="O2", keep_batchnorm_fp32=False, loss_scale_manager=None) @@ -487,7 +485,7 @@ If you need to use the trained model to perform inference on multiple hardware p steps_per_epoch=batch_num) opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay) - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False) + loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'}, amp_level="O2", keep_batchnorm_fp32=False, loss_scale_manager=None) diff --git 
a/model_zoo/official/nlp/bert_thor/src/model_thor.py b/model_zoo/official/nlp/bert_thor/src/model_thor.py index 1f5a39b5e9..220e625101 100644 --- a/model_zoo/official/nlp/bert_thor/src/model_thor.py +++ b/model_zoo/official/nlp/bert_thor/src/model_thor.py @@ -135,7 +135,7 @@ class Model: >>> return out >>> >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None) >>> dataset = get_dataset() @@ -307,7 +307,7 @@ class Model: >>> train_dataset = get_train_dataset() >>> valid_dataset = get_valid_dataset() >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics={'acc'}) >>> model.init(train_dataset, valid_dataset) @@ -597,7 +597,7 @@ class Model: Examples: >>> dataset = get_dataset() >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> loss_scale_manager = FixedLossScaleManager() >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None, loss_scale_manager=loss_scale_manager) @@ -714,7 +714,7 @@ class Model: Examples: >>> dataset = get_dataset() >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'}) >>> model.eval(dataset) """ diff --git a/model_zoo/official/recommend/ncf/README.md b/model_zoo/official/recommend/ncf/README.md index 0974f1f23b..8e55d4add5 100644 --- a/model_zoo/official/recommend/ncf/README.md +++ b/model_zoo/official/recommend/ncf/README.md @@ -243,8 +243,7 @@ https://www.mindspore.cn/tutorial/zh-CN/master/use/multi_platform_inference.html net = GoogleNet(num_classes=cfg.num_classes) opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, cfg.momentum, weight_decay=cfg.weight_decay) - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', - is_grad=False) + loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'}) # Load pre-trained model @@ -275,7 +274,7 @@ https://www.mindspore.cn/tutorial/zh-CN/master/use/multi_platform_inference.html steps_per_epoch=batch_num) opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay) - loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False) + loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'}, amp_level="O2", keep_batchnorm_fp32=False, loss_scale_manager=None) diff --git a/model_zoo/official/recommend/ncf/src/ncf.py b/model_zoo/official/recommend/ncf/src/ncf.py index d496181699..5a93918d88 100644 --- a/model_zoo/official/recommend/ncf/src/ncf.py +++ b/model_zoo/official/recommend/ncf/src/ncf.py @@ -199,7 +199,7 @@ class NetWithLossClass(nn.Cell): """ def __init__(self, network): super(NetWithLossClass, 
self).__init__(auto_prefix=False) - #self.loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + #self.loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) self.loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) self.network = network self.reducesum = P.ReduceSum(keep_dims=False) diff --git a/model_zoo/research/cv/ghostnet/eval.py b/model_zoo/research/cv/ghostnet/eval.py index 7317934ec0..ad3247d565 100644 --- a/model_zoo/research/cv/ghostnet/eval.py +++ b/model_zoo/research/cv/ghostnet/eval.py @@ -50,8 +50,7 @@ if __name__ == '__main__': else: raise ValueError("Unsupport platform.") - loss = nn.SoftmaxCrossEntropyWithLogits( - is_grad=False, sparse=True, reduction='mean') + loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') if args_opt.model == 'ghostnet': net = ghostnet_1x(num_classes=config_platform.num_classes) diff --git a/model_zoo/research/cv/ghostnet_quant/eval.py b/model_zoo/research/cv/ghostnet_quant/eval.py index 6aa301344a..c51af62099 100644 --- a/model_zoo/research/cv/ghostnet_quant/eval.py +++ b/model_zoo/research/cv/ghostnet_quant/eval.py @@ -49,8 +49,7 @@ if __name__ == '__main__': else: raise ValueError("Unsupport platform.") - loss = nn.SoftmaxCrossEntropyWithLogits( - is_grad=False, sparse=True, reduction='mean') + loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') net = ghostnet_1x(num_classes=config_platform.num_classes) diff --git a/model_zoo/research/cv/resnet50_adv_pruning/eval.py b/model_zoo/research/cv/resnet50_adv_pruning/eval.py index ef76a90d15..7d8e33cb47 100644 --- a/model_zoo/research/cv/resnet50_adv_pruning/eval.py +++ b/model_zoo/research/cv/resnet50_adv_pruning/eval.py @@ -53,8 +53,7 @@ if __name__ == '__main__': else: raise ValueError("Unsupport platform.") - loss = nn.SoftmaxCrossEntropyWithLogits( - is_grad=False, sparse=True, reduction='mean') + loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean') if args_opt.platform == "Ascend": net.to_float(mstype.float16) diff --git a/tests/st/networks/models/resnet50/src_thor/model_thor.py b/tests/st/networks/models/resnet50/src_thor/model_thor.py index 617d7ff812..6c455effa3 100644 --- a/tests/st/networks/models/resnet50/src_thor/model_thor.py +++ b/tests/st/networks/models/resnet50/src_thor/model_thor.py @@ -128,7 +128,7 @@ class Model: >>> return out >>> >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None) >>> dataset = get_dataset() @@ -295,7 +295,7 @@ class Model: >>> train_dataset = get_train_dataset() >>> valid_dataset = get_valid_dataset() >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics={'acc'}) >>> model.init(train_dataset, valid_dataset) @@ -566,7 +566,7 @@ class Model: Examples: >>> dataset = get_dataset() >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> loss_scale_manager = FixedLossScaleManager() >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9) >>> model = Model(net, loss_fn=loss, optimizer=optim, 
metrics=None, loss_scale_manager=loss_scale_manager) @@ -678,7 +678,7 @@ class Model: Examples: >>> dataset = get_dataset() >>> net = Net() - >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True) + >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True) >>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'}) >>> model.eval(dataset) """
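
For reference, the updated call pattern used throughout this patch looks like the minimal sketch below. It mirrors the docstring examples above; `Net` is the same kind of placeholder network those examples use, and the `mindspore.train.model` import path is assumed from the file layout in this patch. The key point is simply that `is_grad` is no longer passed to `SoftmaxCrossEntropyWithLogits`:

    import mindspore.nn as nn
    from mindspore.train.model import Model

    # Placeholder network, as in the docstring examples above.
    class Net(nn.Cell):
        def __init__(self):
            super(Net, self).__init__()
            self.fc = nn.Dense(10, 3)

        def construct(self, x):
            return self.fc(x)

    net = Net()
    # is_grad is no longer passed; only sparse/reduction are set here.
    loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
    optim = nn.Momentum(params=net.trainable_params(), learning_rate=0.1,
                        momentum=0.9)
    model = Model(net, loss_fn=loss, optimizer=optim, metrics={'acc'})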