From b7d1c970e79da6e409c2cd2fd711609f81f575ad Mon Sep 17 00:00:00 2001
From: lihongkang <lihongkang1@huawei.com>
Date: Sat, 14 Nov 2020 11:43:38 +0800
Subject: [PATCH] fix bugs

---
 mindspore/nn/optim/rmsprop.py         |  2 +-
 mindspore/ops/operations/array_ops.py |  4 ++--
 mindspore/ops/operations/nn_ops.py    | 13 +++++++------
 3 files changed, 10 insertions(+), 9 deletions(-)

diff --git a/mindspore/nn/optim/rmsprop.py b/mindspore/nn/optim/rmsprop.py
index a6e77fd848..dfb6285bd8 100644
--- a/mindspore/nn/optim/rmsprop.py
+++ b/mindspore/nn/optim/rmsprop.py
@@ -133,7 +133,7 @@ class RMSProp(Optimizer):
     Examples:
         >>> net = Net()
         >>> #1) All parameters use the same learning rate and weight decay
-        >>> optim = nn.RMSProp(params=net.trainable_params(), learning_rate=lr)
+        >>> optim = nn.RMSProp(params=net.trainable_params(), learning_rate=0.1)
         >>>
         >>> #2) Use parameter groups and set different values
         >>> conv_params = list(filter(lambda x: 'conv' in x.name, net.trainable_params()))
diff --git a/mindspore/ops/operations/array_ops.py b/mindspore/ops/operations/array_ops.py
index f5b7379430..06d52b805d 100644
--- a/mindspore/ops/operations/array_ops.py
+++ b/mindspore/ops/operations/array_ops.py
@@ -379,7 +379,7 @@ class IsInstance(PrimitiveWithInfer):
 
     Examples:
         >>> a = 1
-        >>> result = P.IsInstance()(a, mindspore.int32)
+        >>> result = P.IsInstance()(a, mindspore.int64)
        >>> print(result)
         True
     """
@@ -1640,7 +1640,7 @@ class Tile(PrimitiveWithInfer):
         x_shp = x['shape']
         validator.check_value_type("multiples", multiples_v, [tuple], self.name)
         for i, multiple in enumerate(multiples_v):
-            validator.check_value_type("multiples[%d]" % i, multiple, [int], self.name)
+            validator.check_positive_int(multiple, "multiples[%d]" % i, self.name)
         validator.check_value_type("x[\'dtype\']", x["dtype"], mstype.tensor_type, self.name)
         len_sub = len(multiples_v) - len(x_shp)
         multiples_w = None
diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py
index c00a780274..467eb0b212 100644
--- a/mindspore/ops/operations/nn_ops.py
+++ b/mindspore/ops/operations/nn_ops.py
@@ -3037,11 +3037,12 @@ class Pad(PrimitiveWithInfer):
         >>> input_tensor = Tensor(np.array([[-0.1, 0.3, 3.6], [0.4, 0.5, -3.2]]), mindspore.float32)
         >>> pad_op = P.Pad(((1, 2), (2, 1)))
         >>> output_tensor = pad_op(input_tensor)
-        >>> assert output_tensor == Tensor(np.array([[ 0. , 0. , 0. , 0. , 0. , 0. ],
-        >>>                                          [ 0. , 0. , -0.1, 0.3, 3.6, 0. ],
-        >>>                                          [ 0. , 0. , 0.4, 0.5, -3.2, 0. ],
-        >>>                                          [ 0. , 0. , 0. , 0. , 0. , 0. ],
-        >>>                                          [ 0. , 0. , 0. , 0. , 0. , 0. ]]), mindspore.float32)
+        >>> print(output_tensor)
+        [[ 0.   0.   0.   0.   0.   0. ]
+         [ 0.   0.  -0.1  0.3  3.6  0. ]
+         [ 0.   0.   0.4  0.5 -3.2  0. ]
+         [ 0.   0.   0.   0.   0.   0. ]
+         [ 0.   0.   0.   0.   0.   0. ]]
     """
 
     @prim_attr_register
@@ -4857,7 +4858,7 @@ class ApplyPowerSign(PrimitiveWithInfer):
         >>>         self.beta = 0.9
         >>>     def construct(self, grad):
         >>>         out = self.apply_power_sign(self.var, self.m, self.lr, self.logbase,
-        >>>                                     self.sign_decay, self.beta, grad)
+        ...                                     self.sign_decay, self.beta, grad)
         >>>         return out
         >>> net = Net()
         >>> grad = Tensor(np.random.rand(3, 3).astype(np.float32))
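
Note on the Tile change above: after this patch, each entry of `multiples` must be a positive int rather than any int. A minimal usage sketch under that assumption; the import path follows the 2020-era `mindspore.ops.operations as P` convention, and the tensor shapes and values are illustrative only, not part of the patch:

    import numpy as np
    import mindspore
    from mindspore import Tensor
    from mindspore.ops import operations as P

    tile = P.Tile()
    x = Tensor(np.array([[1, 2], [3, 4]]), mindspore.float32)

    # Valid: every entry of `multiples` is a positive int, so the per-element
    # check ("multiples[0]", "multiples[1]", ...) passes.
    out = tile(x, (2, 3))
    print(out.shape)   # (4, 6): the 2x2 input repeated 2x along dim 0 and 3x along dim 1

    # tile(x, (0, 3))  # would now be rejected by check_positive_int at infer time,
    #                  # whereas the old int-only check_value_type accepted it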