From 639a085d93fa2b9849476bf6b548cdcc6b4c3c1f Mon Sep 17 00:00:00 2001
From: lihongkang <lihongkang1@huawei.com>
Date: Wed, 21 Oct 2020 17:19:15 +0800
Subject: [PATCH] fix bugs

---
 mindspore/ops/operations/array_ops.py | 2 ++
 mindspore/ops/operations/math_ops.py  | 1 +
 mindspore/ops/operations/nn_ops.py    | 1 +
 3 files changed, 4 insertions(+)

diff --git a/mindspore/ops/operations/array_ops.py b/mindspore/ops/operations/array_ops.py
index 24c3784afa..51cd96c1b8 100644
--- a/mindspore/ops/operations/array_ops.py
+++ b/mindspore/ops/operations/array_ops.py
@@ -826,6 +826,7 @@ class Rank(PrimitiveWithInfer):
         >>> input_tensor = Tensor(np.array([[2, 2], [2, 2]]), mindspore.float32)
         >>> rank = P.Rank()
         >>> rank(input_tensor)
+        2
     """
 
     @prim_attr_register
@@ -896,6 +897,7 @@ class Size(PrimitiveWithInfer):
         >>> input_tensor = Tensor(np.array([[2, 2], [2, 2]]), mindspore.float32)
         >>> size = P.Size()
         >>> output = size(input_tensor)
+        4
     """
 
     @prim_attr_register
diff --git a/mindspore/ops/operations/math_ops.py b/mindspore/ops/operations/math_ops.py
index e294bb92e4..3e123c6a7d 100644
--- a/mindspore/ops/operations/math_ops.py
+++ b/mindspore/ops/operations/math_ops.py
@@ -2938,6 +2938,7 @@ class Sin(PrimitiveWithInfer):
         >>> sin = P.Sin()
         >>> input_x = Tensor(np.array([0.62, 0.28, 0.43, 0.62]), mindspore.float32)
         >>> output = sin(input_x)
+        [0.5810352 0.27635565 0.41687083 0.5810352]
     """
 
     @prim_attr_register
diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py
index 5bd8e0edca..cdcf3da1f2 100644
--- a/mindspore/ops/operations/nn_ops.py
+++ b/mindspore/ops/operations/nn_ops.py
@@ -1761,6 +1761,7 @@ class SparseSoftmaxCrossEntropyWithLogits(PrimitiveWithInfer):
 
     @prim_attr_register
     def __init__(self, is_grad=False):
+        validator.check_value_type('is_grad', is_grad, [bool], self.name)
         self.init_prim_io_names(inputs=['features', 'labels'], outputs=['output'])
         self.is_grad = is_grad
         self.add_prim_attr('sens', 1.0)
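
Reviewer note: a minimal sketch (not part of the patch) for checking the three
example outputs added to the docstrings above. It assumes a working MindSpore
install and imports the operators the same way the docstrings do:

    import numpy as np
    import mindspore
    from mindspore import Tensor
    from mindspore.ops import operations as P

    # Rank returns the number of dimensions; Size returns the element count.
    input_tensor = Tensor(np.array([[2, 2], [2, 2]]), mindspore.float32)
    print(P.Rank()(input_tensor))   # expected: 2
    print(P.Size()(input_tensor))   # expected: 4

    # Sin is applied elementwise; outputs are float32-rounded.
    input_x = Tensor(np.array([0.62, 0.28, 0.43, 0.62]), mindspore.float32)
    print(P.Sin()(input_x))         # expected: [0.5810352 0.27635565 0.41687083 0.5810352]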
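
The nn_ops.py hunk makes SparseSoftmaxCrossEntropyWithLogits reject a non-bool
is_grad at construction time rather than failing later. A sketch of the
expected before/after behavior, assuming validator.check_value_type raises
TypeError on a type mismatch as MindSpore's validator utilities do:

    from mindspore.ops import operations as P

    loss = P.SparseSoftmaxCrossEntropyWithLogits(is_grad=True)    # accepted: bool
    loss = P.SparseSoftmaxCrossEntropyWithLogits(is_grad="true")  # with this patch: raises TypeError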