From e0da1539b2c47ecc0f6dec742dfe0ffdb056d1c1 Mon Sep 17 00:00:00 2001
From: zhangz0911gm
Date: Wed, 18 Nov 2020 15:12:53 +0800
Subject: [PATCH] Updating remaining notes

---
 mindspore/ops/operations/_inner_ops.py |  6 ++++--
 mindspore/ops/operations/array_ops.py  |  9 ++++++---
 mindspore/ops/operations/math_ops.py   | 12 ++++++++----
 mindspore/ops/operations/nn_ops.py     |  7 +++++--
 mindspore/ops/operations/random_ops.py | 12 ++++++++----
 5 files changed, 31 insertions(+), 15 deletions(-)

diff --git a/mindspore/ops/operations/_inner_ops.py b/mindspore/ops/operations/_inner_ops.py
index cc87c4dd1a..d4db638029 100644
--- a/mindspore/ops/operations/_inner_ops.py
+++ b/mindspore/ops/operations/_inner_ops.py
@@ -132,7 +132,8 @@ class Range(PrimitiveWithInfer):
     Examples:
         >>> range = P.Range(1.0, 8.0, 2.0)
         >>> x = Tensor(np.array([1, 2, 3, 2]), mindspore.int32)
-        >>> range(x)
+        >>> output = range(x)
+        >>> print(output)
         [3, 5, 7, 5]
     """

@@ -524,7 +525,8 @@ class DynamicGRUV2(PrimitiveWithInfer):
         >>> init_h = Tensor(np.random.rand(8, 16).astype(np.float16))
         >>> dynamic_gru_v2 = P.DynamicGRUV2()
         >>> output = dynamic_gru_v2(x, weight_i, weight_h, bias_i, bias_h, None, init_h)
-        >>> output[0].shape
+        >>> result = output[0].shape
+        >>> print(result)
         (2, 8, 16)
     """

diff --git a/mindspore/ops/operations/array_ops.py b/mindspore/ops/operations/array_ops.py
index 4a871f6fc7..888666be8e 100644
--- a/mindspore/ops/operations/array_ops.py
+++ b/mindspore/ops/operations/array_ops.py
@@ -1072,7 +1072,8 @@ class Ones(PrimitiveWithInfer):

     Examples:
         >>> ones = P.Ones()
-        >>> Ones((2, 2), mindspore.float32)
+        >>> output = Ones((2, 2), mindspore.float32)
+        >>> print(output)
         [[1.0, 1.0],
          [1.0, 1.0]]
     """
@@ -1115,7 +1116,8 @@ class Zeros(PrimitiveWithInfer):

     Examples:
         >>> zeros = P.Zeros()
-        >>> Zeros((2, 2), mindspore.float32)
+        >>> output = Zeros((2, 2), mindspore.float32)
+        >>> print(output)
         [[0.0, 0.0],
          [0.0, 0.0]]

@@ -2615,7 +2617,8 @@ class Diag(PrimitiveWithInfer):
     Examples:
         >>> input_x = Tensor([1, 2, 3, 4])
         >>> diag = P.Diag()
-        >>> diag(input_x)
+        >>> output = diag(input_x)
+        >>> print(output)
         [[1, 0, 0, 0],
          [0, 2, 0, 0],
          [0, 0, 3, 0],
diff --git a/mindspore/ops/operations/math_ops.py b/mindspore/ops/operations/math_ops.py
index 8b77c09964..8c9578a889 100644
--- a/mindspore/ops/operations/math_ops.py
+++ b/mindspore/ops/operations/math_ops.py
@@ -348,7 +348,8 @@ class ReduceMean(_Reduce):
         >>> input_x = Tensor(np.random.randn(3, 4, 5, 6).astype(np.float32))
         >>> op = P.ReduceMean(keep_dims=True)
         >>> output = op(input_x, 1)
-        >>> output.shape
+        >>> result = output.shape
+        >>> print(result)
         (3, 1, 5, 6)
     """

@@ -501,7 +502,8 @@ class ReduceMax(_Reduce):
         >>> input_x = Tensor(np.random.randn(3, 4, 5, 6).astype(np.float32))
         >>> op = P.ReduceMax(keep_dims=True)
         >>> output = op(input_x, 1)
-        >>> output.shape
+        >>> result = output.shape
+        >>> print(result)
         (3, 1, 5, 6)
     """

@@ -545,7 +547,8 @@ class ReduceMin(_Reduce):
         >>> input_x = Tensor(np.random.randn(3, 4, 5, 6).astype(np.float32))
         >>> op = P.ReduceMin(keep_dims=True)
         >>> output = op(input_x, 1)
-        >>> output.shape
+        >>> result = output.shape
+        >>> print(result)
         (3, 1, 5, 6)
     """

@@ -580,7 +583,8 @@ class ReduceProd(_Reduce):
         >>> input_x = Tensor(np.random.randn(3, 4, 5, 6).astype(np.float32))
         >>> op = P.ReduceProd(keep_dims=True)
         >>> output = op(input_x, 1)
-        >>> output.shape
+        >>> result = output.shape
+        >>> print(result)
         (3, 1, 5, 6)
     """

diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py
index 62c8ba7b61..b7f18b2a9b 100644
--- a/mindspore/ops/operations/nn_ops.py
+++ b/mindspore/ops/operations/nn_ops.py
@@ -661,7 +661,8 @@ class FusedBatchNorm(Primitive):
         >>> input_x = Tensor(np.ones([128, 64, 32, 64]), mindspore.float32)
         >>> net = FusedBatchNormNet()
         >>> output = net(input_x)
-        >>> output[0].shape
+        >>> result = output[0].shape
+        >>> print(result)
         (128, 64, 32, 64)
     """
     __mindspore_signature__ = (
@@ -754,7 +755,8 @@ class FusedBatchNormEx(PrimitiveWithInfer):
         >>> input_x = Tensor(np.ones([128, 64, 32, 64]), mindspore.float32)
         >>> net = FusedBatchNormExNet()
         >>> output = net(input_x)
-        >>> output[0].shape
+        >>> result = output[0].shape
+        >>> print(result)
         (128, 64, 32, 64)
     """
     __mindspore_signature__ = (
@@ -1803,6 +1805,7 @@ class TopK(PrimitiveWithInfer):
         >>> input_x = Tensor([1, 2, 3, 4, 5], mindspore.float16)
         >>> k = 3
         >>> values, indices = topk(input_x, k)
+        >>> print((values, indices))
         ([5.0, 4.0, 3.0], [4, 3, 2])
     """

diff --git a/mindspore/ops/operations/random_ops.py b/mindspore/ops/operations/random_ops.py
index aa921b15f0..d25757cead 100644
--- a/mindspore/ops/operations/random_ops.py
+++ b/mindspore/ops/operations/random_ops.py
@@ -38,7 +38,8 @@ class StandardNormal(PrimitiveWithInfer):
         >>> shape = (4, 16)
         >>> stdnormal = P.StandardNormal(seed=2)
         >>> output = stdnormal(shape)
-        >>> output.shape
+        >>> result = output.shape
+        >>> print(result)
         (4, 16)
     """

@@ -85,7 +86,8 @@ class StandardLaplace(PrimitiveWithInfer):
         >>> shape = (4, 16)
         >>> stdlaplace = P.StandardLaplace(seed=2)
         >>> output = stdlaplace(shape)
-        >>> output.shape
+        >>> result = output.shape
+        >>> print(result)
         (4, 16)
     """

@@ -354,9 +356,11 @@ class RandomChoiceWithMask(PrimitiveWithInfer):
         >>> rnd_choice_mask = P.RandomChoiceWithMask()
         >>> input_x = Tensor(np.ones(shape=[240000, 4]).astype(np.bool))
         >>> output_y, output_mask = rnd_choice_mask(input_x)
-        >>> output_y.shape
+        >>> result = output_y.shape
+        >>> print(result)
         (256, 2)
-        >>> output_mask.shape
+        >>> result = output_mask.shape
+        >>> print(result)
         (256,)
     """
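
For reviewers who want to sanity-check the new "assign, then print(result)" pattern outside of the docstrings, below is a minimal standalone sketch of the updated ReduceMean example. The imports and the PyNative-mode setting are assumptions about how these examples are typically run interactively; they are not part of this patch.

# Hypothetical standalone run of the updated ReduceMean docstring example.
# Imports and context mode are assumed, not introduced by this patch.
import numpy as np
from mindspore import Tensor, context
import mindspore.ops.operations as P

context.set_context(mode=context.PYNATIVE_MODE)

input_x = Tensor(np.random.randn(3, 4, 5, 6).astype(np.float32))
op = P.ReduceMean(keep_dims=True)
output = op(input_x, 1)
result = output.shape   # same assignment the patch adds to the docstring
print(result)           # expected: (3, 1, 5, 6)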