diff --git a/mindspore/nn/layer/pooling.py b/mindspore/nn/layer/pooling.py
index 2c13957636..21f49101a5 100644
--- a/mindspore/nn/layer/pooling.py
+++ b/mindspore/nn/layer/pooling.py
@@ -334,7 +334,7 @@ class AvgPool1d(_PoolNd):
         Tensor of shape :math:`(N, C_{out}, L_{out})`.
 
     Examples:
-        >>> pool = nn.AvgPool1d(kernel_size=6, strides=1)
+        >>> pool = nn.AvgPool1d(kernel_size=6, stride=1)
         >>> x = Tensor(np.random.randint(0, 10, [1, 3, 6]), mindspore.float32)
         >>> output = pool(x)
         >>> output.shape
diff --git a/mindspore/ops/operations/array_ops.py b/mindspore/ops/operations/array_ops.py
index cb3f0580f0..dbc8f9b8a9 100644
--- a/mindspore/ops/operations/array_ops.py
+++ b/mindspore/ops/operations/array_ops.py
@@ -1376,8 +1376,9 @@ class ArgMinWithValue(PrimitiveWithInfer):
         - output_x (Tensor) - The minimum value of input tensor, with the same shape as index.
 
     Examples:
-        >>> input_x = Tensor(np.random.rand(5))
+        >>> input_x = Tensor(np.random.rand(5), mindspore.float32)
         >>> index, output = P.ArgMinWithValue()(input_x)
+        0 0.0496291
     """
 
     @prim_attr_register
diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py
index 2a011f9d31..399e5ce73f 100644
--- a/mindspore/ops/operations/nn_ops.py
+++ b/mindspore/ops/operations/nn_ops.py
@@ -5740,9 +5740,13 @@ class LRN(PrimitiveWithInfer):
         Tensor, with the same shape and data type as the input tensor.
 
     Examples:
-        >>> x = Tensor(np.random.rand(1, 10, 4, 4)), mindspore.float32)
+        >>> x = Tensor(np.random.rand(1, 2, 2, 2), mindspore.float32)
         >>> lrn = P.LRN()
         >>> lrn(x)
+        [[[[0.18990143 0.59475636]
+           [0.6291904  0.1371534 ]]
+          [[0.6258911  0.4964315 ]
+           [0.3141494  0.43636137]]]]
     """
     @prim_attr_register
     def __init__(self, depth_radius=5, bias=1.0, alpha=1.0, beta=0.5, norm_region="ACROSS_CHANNELS"):
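
Note for reviewers: a minimal plain-NumPy sketch (not MindSpore code; the variable names are illustrative) of the semantics the corrected ArgMinWithValue and AvgPool1d examples demonstrate. The docstring examples use random inputs, so the printed values in the diff (e.g. "0 0.0496291" and the LRN output block) are sample outputs and will differ from run to run.

# Sketch of the expected behavior, assuming only NumPy.
import numpy as np

# ArgMinWithValue on a 1-D tensor: returns the index of the minimum and the
# minimum value itself, matching the "index, output" pair in the docstring.
x = np.random.rand(5).astype(np.float32)
index, value = np.argmin(x), x.min()
print(index, value)                      # e.g. 0 0.0496291 (seed-dependent)

# AvgPool1d(kernel_size=6, stride=1) on a (1, 3, 6) input: a single length-6
# window per channel, so L_out = 6 - 6 + 1 = 1 and each output is that mean.
x = np.random.randint(0, 10, [1, 3, 6]).astype(np.float32)
output = x.mean(axis=-1, keepdims=True)  # one full-length window per channel
print(output.shape)                      # (1, 3, 1), as in the AvgPool1d example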