From 9b45018dfda3fb47f32703056c25d0af6aff022a Mon Sep 17 00:00:00 2001
From: peixu_ren
Date: Sun, 19 Jul 2020 19:19:25 -0700
Subject: [PATCH] Add random normal op at MindSpore front-end

---
 mindspore/nn/distribution/bernoulli.py        |  5 +-
 mindspore/nn/distribution/normal.py           |  5 +-
 mindspore/ops/composite/__init__.py           |  2 +
 mindspore/ops/composite/random_ops.py         | 63 ++++++++++++++
 mindspore/ops/operations/__init__.py          |  4 +-
 mindspore/ops/operations/random_ops.py        | 85 +++++++++----------
 .../ops/ascend/test_aicpu_ops/test_normal.py  |  2 -
 .../test_aicpu_ops/test_standard_normal.py    |  6 --
 tests/st/ops/gpu/test_normal.py               | 56 ++++++++++++
 tests/st/ops/gpu/test_standard_normal.py      | 41 +++++++++
 tests/ut/python/ops/test_ops.py               |  4 +-
 11 files changed, 214 insertions(+), 59 deletions(-)
 create mode 100644 mindspore/ops/composite/random_ops.py
 create mode 100644 tests/st/ops/gpu/test_normal.py
 create mode 100644 tests/st/ops/gpu/test_standard_normal.py

diff --git a/mindspore/nn/distribution/bernoulli.py b/mindspore/nn/distribution/bernoulli.py
index d0d8a5b08a..9aa20d668f 100644
--- a/mindspore/nn/distribution/bernoulli.py
+++ b/mindspore/nn/distribution/bernoulli.py
@@ -14,6 +14,7 @@
 # ============================================================================
 """Bernoulli Distribution"""
 from mindspore.ops import operations as P
+from mindspore.ops import composite as C
 from .distribution import Distribution
 from ._utils.utils import cast_to_tensor, check_prob
 from ...common import dtype as mstype
@@ -53,6 +54,7 @@ class Bernoulli(Distribution):
             check_prob(self._probs)
         else:
             self._probs = probs
+        self.seed = seed
 
         # ops needed for the class
         self.log = P.Log()
@@ -64,7 +66,6 @@ class Bernoulli(Distribution):
         self.const = P.ScalarToArray()
         self.less = P.Less()
         self.cast = P.Cast()
-        self.normal = P.Normal(seed=seed)
         self.erf = P.Erf()
         self.sqrt = P.Sqrt()
 
@@ -159,7 +160,7 @@ class Bernoulli(Distribution):
             mean_zero = self.const(0.0)
             sd_one = self.const(1.0)
             sqrt_two = self.sqrt(self.const(2.0))
-            sample_norm = self.normal(sample_shape, mean_zero, sd_one)
+            sample_norm = C.normal(sample_shape, mean_zero, sd_one, self.seed)
             sample_uniform = 0.5 * (1 + self.erf(self.realdiv(sample_norm, sqrt_two)))
             sample = self.less(sample_uniform, probs1)
             sample = self.cast(sample, self._dtype)
diff --git a/mindspore/nn/distribution/normal.py b/mindspore/nn/distribution/normal.py
index 344dbd2eeb..61cec6d810 100644
--- a/mindspore/nn/distribution/normal.py
+++ b/mindspore/nn/distribution/normal.py
@@ -15,6 +15,7 @@
 """Normal Distribution"""
 import numpy as np
 from mindspore.ops import operations as P
+from mindspore.ops import composite as C
 from .distribution import Distribution
 from ._utils.utils import convert_to_batch, check_greater_equal_zero
 from ...common import dtype as mstype
@@ -60,6 +61,7 @@ class Normal(Distribution):
         else:
             self._mean_value = mean
             self._sd_value = sd
+        self.seed = seed
 
         #ops needed for the class
         self.exp = P.Exp()
@@ -70,7 +72,6 @@ class Normal(Distribution):
         self.sqrt = P.Sqrt()
         self.realdiv = P.RealDiv()
         self.expm1 = P.Expm1() if get_context('device_target') == 'Ascend' else self._expm1_by_step
-        self.normal = P.Normal(seed=seed)
         self.shape = P.Shape()
         self.zeroslike = P.ZerosLike()
         self.const = P.ScalarToArray()
@@ -163,7 +164,7 @@ class Normal(Distribution):
             sample_shape = shape + batch_shape
             mean_zero = self.const(0.0)
             sd_one = self.const(1.0)
-            sample_norm = self.normal(sample_shape, mean_zero, sd_one)
+            sample_norm = C.normal(sample_shape, mean_zero, sd_one, self.seed)
             sample = self.add(mean, self.mul(sample_norm, sd))
             return sample
         return None
diff --git a/mindspore/ops/composite/__init__.py b/mindspore/ops/composite/__init__.py
index 6db8d666a2..bb5e2960ff 100644
--- a/mindspore/ops/composite/__init__.py
+++ b/mindspore/ops/composite/__init__.py
@@ -27,6 +27,7 @@
 from .clip_ops import clip_by_value
 from .multitype_ops.add_impl import hyper_add
 from .multitype_ops.ones_like_impl import ones_like
 from .multitype_ops.zeros_like_impl import zeros_like
+from .random_ops import normal
 
 __all__ = [
@@ -47,4 +48,5 @@ __all__ = [
     'zeros_like',
     'ones_like',
     'zip_operation',
+    'normal',
     'clip_by_value',]
diff --git a/mindspore/ops/composite/random_ops.py b/mindspore/ops/composite/random_ops.py
new file mode 100644
index 0000000000..db338f5672
--- /dev/null
+++ b/mindspore/ops/composite/random_ops.py
@@ -0,0 +1,63 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+"""Operations for random number generators."""
+
+from mindspore.ops.primitive import constexpr
+from .. import operations as P
+
+# graph-level RNG seed
+_GRAPH_SEED = 0
+
+@constexpr
+def set_seed(seed):
+    global _GRAPH_SEED
+    _GRAPH_SEED = seed
+
+@constexpr
+def get_seed():
+    return _GRAPH_SEED
+
+
+def normal(shape, mean, stddev, seed=0):
+    """
+    Generates random numbers according to the Normal (or Gaussian) random number distribution.
+
+    Args:
+        - **shape** (tuple) - The shape of the random tensor to be generated.
+        - **mean** (Tensor) - The mean μ distribution parameter, which specifies the location of the peak.
+          With float32 data type.
+        - **stddev** (Tensor) - The standard deviation σ distribution parameter. With float32 data type.
+        - **seed** (int) - Seed used as an entropy source for the random number engine to generate
+          pseudo-random numbers. Default: 0.
+
+    Returns:
+        Tensor. The shape is the broadcast of the input `shape` and the shapes of `mean` and `stddev`.
+        The dtype is float32.
+
+    Examples:
+        >>> shape = (4, 16)
+        >>> mean = Tensor(1.0, mstype.float32)
+        >>> stddev = Tensor(1.0, mstype.float32)
+        >>> output = C.normal(shape, mean, stddev, seed=5)
+    """
+    set_seed(10)
+    seed1 = get_seed()
+    seed2 = seed
+    stdnormal = P.StandardNormal(seed1, seed2)
+    rnd = stdnormal(shape)
+    value = rnd * stddev + mean
+    return value
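The composite `normal` above reduces to an affine transform of a standard-normal draw: it asks `P.StandardNormal(seed1, seed2)` for z and returns `mean + stddev * z`, where `seed1` is currently the hard-coded graph-level seed (10) and the caller's `seed` feeds `seed2`. Numerically the result matches this NumPy sketch, shown for reference only (the `normal_sketch` helper is not part of the patch):

import numpy as np

def normal_sketch(shape, mean, stddev, seed=0):
    """NumPy analogue of C.normal: draw z ~ N(0, 1), then return mean + stddev * z."""
    rng = np.random.default_rng(seed)
    z = rng.standard_normal(shape).astype(np.float32)
    return mean + stddev * z  # mean and stddev broadcast against `shape`

# Mirrors the docstring example: scalar mean/stddev broadcast to (4, 16).
out = normal_sketch((4, 16), np.float32(1.0), np.float32(1.0), seed=5)
assert out.shape == (4, 16)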
diff --git a/mindspore/ops/operations/__init__.py b/mindspore/ops/operations/__init__.py
index 2da750753b..3606933cba 100644
--- a/mindspore/ops/operations/__init__.py
+++ b/mindspore/ops/operations/__init__.py
@@ -54,7 +54,7 @@ from .math_ops import (Abs, ACos, Asin, Asinh, AddN, AccumulateNV2, AssignAdd, A
                        Sin, Sqrt, Rsqrt, BesselI0e, BesselI1e, TruncateDiv, TruncateMod,
                        Square, Sub, TensorAdd, Sign, Round, SquareSumAll, Atan, Atanh, Cosh, Sinh, Eps, Tan)
-from .random_ops import (RandomChoiceWithMask, Normal, Gamma, Poisson, UniformInt, UniformReal,
+from .random_ops import (RandomChoiceWithMask, StandardNormal, Gamma, Poisson, UniformInt, UniformReal,
                          RandomCategorical, Laplace)
 from .nn_ops import (LSTM, SGD, Adam, FusedSparseAdam, FusedSparseLazyAdam, ApplyMomentum, BatchNorm, BiasAdd, Conv2D,
@@ -174,7 +174,7 @@ __all__ = [
     'HSigmoid',
     'Tanh',
     'RandomChoiceWithMask',
-    'Normal',
+    'StandardNormal',
     'Gamma',
     'Poisson',
     'UniformInt',
diff --git a/mindspore/ops/operations/random_ops.py b/mindspore/ops/operations/random_ops.py
index 6ce58ebd1a..065c4eaf27 100644
--- a/mindspore/ops/operations/random_ops.py
+++ b/mindspore/ops/operations/random_ops.py
@@ -22,6 +22,48 @@
 from ..primitive import PrimitiveWithInfer, prim_attr_register
 from .._utils import get_broadcast_shape
 
+
+class StandardNormal(PrimitiveWithInfer):
+    r"""
+    Generates random numbers according to the standard Normal (or Gaussian) random number distribution.
+
+    Args:
+        seed (int): Random seed. Default: 0.
+        seed2 (int): Random seed2. Default: 0.
+
+    Inputs:
+        - **shape** (tuple) - The shape of random tensor to be generated. Only constant value is allowed.
+
+    Outputs:
+        Tensor. The shape is the same as the input `shape`.
+        The dtype is float32.
+
+    Examples:
+        >>> shape = (4, 16)
+        >>> stdnormal = P.StandardNormal(seed=2)
+        >>> output = stdnormal(shape)
+    """
+
+    @prim_attr_register
+    def __init__(self, seed=0, seed2=0):
+        """Init StandardNormal"""
+        self.init_prim_io_names(inputs=['shape'], outputs=['output'])
+        validator.check_value_type('seed', seed, [int], self.name)
+        validator.check_value_type('seed2', seed2, [int], self.name)
+
+    def __infer__(self, shape):
+        shape_v = shape["value"]
+        if shape_v is None:
+            raise ValueError(f"For {self.name}, shape must be const.")
+        validator.check_value_type("shape", shape_v, [tuple], self.name)
+        for i, shape_i in enumerate(shape_v):
+            validator.check_integer("shape[%d]" % i, shape_i, 0, Rel.GT, self.name)
+        out = {
+            'shape': shape_v,
+            'dtype': mstype.float32,
+            'value': None}
+        return out
+
+
 class Laplace(PrimitiveWithInfer):
     r"""
     Generates random numbers according to the Laplace random number distribution.
@@ -393,46 +435,3 @@ class RandomCategorical(PrimitiveWithInfer):
         return {'shape': (x_shape),
                 'dtype': (self.dtype),
                 'value': None}
-
-
-class Normal(PrimitiveWithInfer):
-    """
-    Generates random samples from a normal(Gaussian) distribution.
-
-    Args:
-        seed (int): Random seed. Default: 0.
-
-    Inputs:
-        - **shape** (tuple[int]) - The shape of output tensor. Only constant value is allowed.
-        - **mean** (Tensor) - The mean of the distribution, with float32 data type.
-        - **stddev** (Tensor) - The standard deviation of the distribution, with float32 data type.
-
-    Outputs:
-        Tensor, with the given shape from the specific distribution and float32 data type.
-
-    Examples:
-        >>> normal = P.Normal()
-        >>> mean = Tensor(0., mstype.float32)
-        >>> stddev = Tensor(1., mstype.float32)
-        >>> out = normal((32, 3, 3), mean, stddev)
-    """
-
-    @prim_attr_register
-    def __init__(self, seed=0):
-        """Init Normal"""
-        validator.check_value_type("seed", seed, [int], self.name)
-
-    def __infer__(self, shape, mean, stddev):
-        shape_value = shape["value"]
-        if shape_value is None:
-            raise ValueError(f"For {self.name}, shape must be const.")
-        validator.check_value_type("shape", shape_value, [tuple], self.name)
-        for i, shape_i in enumerate(shape_value):
-            validator.check_integer("shape[%d]" % i, shape_i, 0, Rel.GE, self.name)
-
-        validator.check_tensor_type_same({"mean": mean["dtype"]}, [mstype.float32], self.name)
-        validator.check_tensor_type_same({"stddev": stddev["dtype"]}, [mstype.float32], self.name)
-
-        out = {"shape": shape_value,
-               "dtype": mstype.float32,
-               "value": None}
-        return out
diff --git a/tests/st/ops/ascend/test_aicpu_ops/test_normal.py b/tests/st/ops/ascend/test_aicpu_ops/test_normal.py
index a92664f589..346fb1a655 100644
--- a/tests/st/ops/ascend/test_aicpu_ops/test_normal.py
+++ b/tests/st/ops/ascend/test_aicpu_ops/test_normal.py
@@ -43,7 +43,6 @@ def test_net_1D():
     net = Net(shape, seed)
     tmean, tstddev = Tensor(mean, mstype.float32), Tensor(stddev, mstype.float32)
     output = net(tmean, tstddev)
-    print(output.asnumpy())
     assert output.shape == (3, 2, 4)
 
 
@@ -55,5 +54,4 @@ def test_net_ND():
     net = Net(shape, seed)
     tmean, tstddev = Tensor(mean, mstype.float32), Tensor(stddev, mstype.float32)
     output = net(tmean, tstddev)
-    print(output.asnumpy())
     assert output.shape == (3, 2, 2)
diff --git a/tests/st/ops/ascend/test_aicpu_ops/test_standard_normal.py b/tests/st/ops/ascend/test_aicpu_ops/test_standard_normal.py
index 5cc21fac80..847e3e623a 100644
--- a/tests/st/ops/ascend/test_aicpu_ops/test_standard_normal.py
+++ b/tests/st/ops/ascend/test_aicpu_ops/test_standard_normal.py
@@ -13,13 +13,8 @@
 # limitations under the License.
 # ============================================================================
 
-import numpy as np
-import pytest
-
 import mindspore.context as context
 import mindspore.nn as nn
-from mindspore import Tensor
-from mindspore.common import dtype as mstype
 from mindspore.ops import operations as P
 
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -43,5 +38,4 @@ def test_net():
     shape = (3, 2, 4)
     net = Net(shape, seed, seed2)
     output = net()
-    print(output.asnumpy())
     assert output.shape == (3, 2, 4)
diff --git a/tests/st/ops/gpu/test_normal.py b/tests/st/ops/gpu/test_normal.py
new file mode 100644
index 0000000000..0c4866f6f0
--- /dev/null
+++ b/tests/st/ops/gpu/test_normal.py
@@ -0,0 +1,56 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import numpy as np
+
+import mindspore.context as context
+import mindspore.nn as nn
+from mindspore import Tensor
+from mindspore.common import dtype as mstype
+from mindspore.ops import composite as C
+
+context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
+
+
+class Net(nn.Cell):
+    def __init__(self, shape, seed=0):
+        super(Net, self).__init__()
+        self.shape = shape
+        self.seed = seed
+
+    def construct(self, mean, stddev):
+        return C.normal(self.shape, mean, stddev, self.seed)
+
+
+def test_net_1D():
+    seed = 10
+    shape = (3, 2, 4)
+    mean = 1.0
+    stddev = 1.0
+    net = Net(shape, seed)
+    tmean, tstddev = Tensor(mean, mstype.float32), Tensor(stddev, mstype.float32)
+    output = net(tmean, tstddev)
+    assert output.shape == (3, 2, 4)
+
+
+def test_net_ND():
+    seed = 10
+    shape = (3, 1, 2)
+    mean = np.array([[[1], [2]], [[3], [4]], [[5], [6]]]).astype(np.float32)
+    stddev = np.array([1.0]).astype(np.float32)
+    net = Net(shape, seed)
+    tmean, tstddev = Tensor(mean, mstype.float32), Tensor(stddev, mstype.float32)
+    output = net(tmean, tstddev)
+    assert output.shape == (3, 2, 2)
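The expected shape in test_net_ND above follows from NumPy-style broadcasting: the (3, 1, 2) standard-normal draw is scaled by the (1,)-shaped stddev and shifted by the (3, 2, 1)-shaped mean, giving (3, 2, 2). A quick NumPy check of that shape arithmetic, illustrative only and not part of the test:

import numpy as np

rnd = np.zeros((3, 1, 2), dtype=np.float32)                               # stand-in for the N(0, 1) draw
mean = np.array([[[1], [2]], [[3], [4]], [[5], [6]]], dtype=np.float32)   # shape (3, 2, 1)
stddev = np.array([1.0], dtype=np.float32)                                # shape (1,)
assert (rnd * stddev + mean).shape == (3, 2, 2)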
diff --git a/tests/st/ops/gpu/test_standard_normal.py b/tests/st/ops/gpu/test_standard_normal.py
new file mode 100644
index 0000000000..dd89848c93
--- /dev/null
+++ b/tests/st/ops/gpu/test_standard_normal.py
@@ -0,0 +1,41 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import mindspore.context as context
+import mindspore.nn as nn
+from mindspore.ops import operations as P
+
+context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
+
+
+class Net(nn.Cell):
+    def __init__(self, shape, seed=0, seed2=0):
+        super(Net, self).__init__()
+        self.shape = shape
+        self.seed = seed
+        self.seed2 = seed2
+        self.stdnormal = P.StandardNormal(seed, seed2)
+
+    def construct(self):
+        return self.stdnormal(self.shape)
+
+
+def test_net():
+    seed = 10
+    seed2 = 10
+    shape = (3, 2, 4)
+    net = Net(shape, seed, seed2)
+    output = net()
+    assert output.shape == (3, 2, 4)
diff --git a/tests/ut/python/ops/test_ops.py b/tests/ut/python/ops/test_ops.py
index 8972e4fd69..7d2c7ff10a 100755
--- a/tests/ut/python/ops/test_ops.py
+++ b/tests/ut/python/ops/test_ops.py
@@ -533,10 +533,10 @@ class NormalNet(nn.Cell):
     def __init__(self, shape=None, seed=0):
         super(NormalNet, self).__init__()
         self.shape = shape
-        self.normal = P.Normal(seed=seed)
+        self.seed = seed
 
     def construct(self, mean, stddev):
-        out = self.normal(self.shape, mean, stddev)
+        out = C.normal(self.shape, mean, stddev, self.seed)
         return out
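Taken together, call sites that previously built a P.Normal(seed=seed) primitive and invoked it as normal(shape, mean, stddev) now call the composite C.normal(shape, mean, stddev, seed) directly inside construct, as the updated NormalNet above shows. A minimal standalone sketch of the new usage (the cell name and device target are chosen for illustration, not taken from this patch):

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype
from mindspore.ops import composite as C

context.set_context(mode=context.GRAPH_MODE, device_target="GPU")  # or "Ascend"


class SampleNet(nn.Cell):
    def __init__(self, shape, seed=0):
        super(SampleNet, self).__init__()
        self.shape = shape
        self.seed = seed

    def construct(self, mean, stddev):
        # Before this patch: self.normal = P.Normal(seed=seed); self.normal(self.shape, mean, stddev)
        return C.normal(self.shape, mean, stddev, self.seed)


net = SampleNet((3, 2, 4), seed=10)
output = net(Tensor(0.0, mstype.float32), Tensor(1.0, mstype.float32))
assert output.shape == (3, 2, 4)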