From 65d50861ec5ac2d50e5fe56758620eded8808d93 Mon Sep 17 00:00:00 2001
From: peixu_ren
Date: Wed, 4 Nov 2020 10:38:53 -0500
Subject: [PATCH] Remove expm1_generic and log1p_generic from PP utils

---
 mindspore/nn/probability/bijector/power_transform.py       | 6 +++---
 mindspore/nn/probability/bijector/softplus.py              | 4 ++--
 mindspore/nn/probability/distribution/_utils/__init__.py   | 2 --
 mindspore/nn/probability/distribution/_utils/custom_ops.py | 7 -------
 mindspore/nn/probability/distribution/gumbel.py            | 4 ++--
 mindspore/nn/probability/distribution/log_normal.py        | 4 ++--
 mindspore/nn/probability/distribution/logistic.py          | 6 +++---
 mindspore/nn/probability/distribution/normal.py            | 4 ++--
 .../python/nn/probability/distribution/test_lognormal.py   | 5 +++--
 9 files changed, 17 insertions(+), 25 deletions(-)

diff --git a/mindspore/nn/probability/bijector/power_transform.py b/mindspore/nn/probability/bijector/power_transform.py
index edd5901011..3c66e16ed3 100644
--- a/mindspore/nn/probability/bijector/power_transform.py
+++ b/mindspore/nn/probability/bijector/power_transform.py
@@ -15,7 +15,7 @@
 """Power Bijector"""
 from mindspore.ops import operations as P
 from ..distribution._utils.utils import check_greater_equal_zero
-from ..distribution._utils.custom_ops import exp_generic, expm1_generic, log_generic, log1p_generic
+from ..distribution._utils.custom_ops import exp_generic, log_generic
 from .bijector import Bijector
 
 
@@ -72,9 +72,9 @@
         self.dtypeop = P.DType()
         self.cast = P.Cast()
         self.exp = exp_generic
-        self.expm1 = expm1_generic
+        self.expm1 = P.Expm1()
         self.log = log_generic
-        self.log1p = log1p_generic
+        self.log1p = P.Log1p()
 
     @property
     def power(self):
diff --git a/mindspore/nn/probability/bijector/softplus.py b/mindspore/nn/probability/bijector/softplus.py
index 0e9a6b19bc..bad29e7fe1 100644
--- a/mindspore/nn/probability/bijector/softplus.py
+++ b/mindspore/nn/probability/bijector/softplus.py
@@ -16,7 +16,7 @@
 import numpy as np
 from mindspore.ops import operations as P
 from mindspore.nn.layer.activation import LogSigmoid
-from ..distribution._utils.custom_ops import exp_generic, expm1_generic, log_generic
+from ..distribution._utils.custom_ops import exp_generic, log_generic
 from .bijector import Bijector
 
 
@@ -65,7 +65,7 @@
 
         self.exp = exp_generic
         self.log = log_generic
-        self.expm1 = expm1_generic
+        self.expm1 = P.Expm1()
         self.abs = P.Abs()
         self.dtypeop = P.DType()
         self.cast = P.Cast()
diff --git a/mindspore/nn/probability/distribution/_utils/__init__.py b/mindspore/nn/probability/distribution/_utils/__init__.py
index f4d66d8336..c54399b623 100644
--- a/mindspore/nn/probability/distribution/_utils/__init__.py
+++ b/mindspore/nn/probability/distribution/_utils/__init__.py
@@ -25,9 +25,7 @@ __all__ = [
     'check_greater_zero',
     'check_prob',
     'exp_generic',
-    'expm1_generic',
     'log_generic',
-    'log1p_generic',
     'broadcast_to',
     'set_param_type',
     'CheckTensor',
diff --git a/mindspore/nn/probability/distribution/_utils/custom_ops.py b/mindspore/nn/probability/distribution/_utils/custom_ops.py
index 3bc7c3e0fd..a4da351302 100644
--- a/mindspore/nn/probability/distribution/_utils/custom_ops.py
+++ b/mindspore/nn/probability/distribution/_utils/custom_ops.py
@@ -32,13 +32,6 @@
     return exp(input_x)
 
 
-def expm1_generic(input_x):
-    """
-    Expm1 ops under GPU context.
-    """
-    return exp_generic(input_x) - 1.0
-
-
 def log_generic(input_x):
     """
     Log op on Ascend is calculated as log(abs(x)).
diff --git a/mindspore/nn/probability/distribution/gumbel.py b/mindspore/nn/probability/distribution/gumbel.py
index 97f2b46f20..c57d1c0e65 100644
--- a/mindspore/nn/probability/distribution/gumbel.py
+++ b/mindspore/nn/probability/distribution/gumbel.py
@@ -22,7 +22,7 @@
 import mindspore.nn.probability.bijector as msb
 import mindspore.nn.probability.distribution as msd
 from .transformed_distribution import TransformedDistribution
 from ._utils.utils import check_distribution_name
-from ._utils.custom_ops import exp_generic, expm1_generic, log_generic
+from ._utils.custom_ops import exp_generic, log_generic
 
 
 class Gumbel(TransformedDistribution):
     """
@@ -120,7 +120,7 @@
         self.cast = P.Cast()
         self.const = P.ScalarToArray()
         self.exp = exp_generic
-        self.expm1 = expm1_generic
+        self.expm1 = P.Expm1()
         self.fill = P.Fill()
         self.lgamma = nn.LGamma()
         self.log = log_generic
diff --git a/mindspore/nn/probability/distribution/log_normal.py b/mindspore/nn/probability/distribution/log_normal.py
index c82e79f75c..57086d6f37 100644
--- a/mindspore/nn/probability/distribution/log_normal.py
+++ b/mindspore/nn/probability/distribution/log_normal.py
@@ -19,7 +19,7 @@
 from mindspore.common import dtype as mstype
 import mindspore.nn.probability.bijector as msb
 import mindspore.nn.probability.distribution as msd
 from ._utils.utils import check_distribution_name
-from ._utils.custom_ops import exp_generic, expm1_generic, log_generic
+from ._utils.custom_ops import exp_generic, log_generic
 
 
 class LogNormal(msd.TransformedDistribution):
     """
@@ -146,7 +146,7 @@
 
         #ops needed for the class
         self.exp = exp_generic
-        self.expm1 = expm1_generic
+        self.expm1 = P.Expm1()
         self.log = log_generic
         self.const = P.ScalarToArray()
         self.erf = P.Erf()
diff --git a/mindspore/nn/probability/distribution/logistic.py b/mindspore/nn/probability/distribution/logistic.py
index 1033f4de95..a0ec635fd0 100644
--- a/mindspore/nn/probability/distribution/logistic.py
+++ b/mindspore/nn/probability/distribution/logistic.py
@@ -20,7 +20,7 @@
 from mindspore._checkparam import Validator
 from mindspore.common import dtype as mstype
 from .distribution import Distribution
 from ._utils.utils import check_greater_zero
-from ._utils.custom_ops import exp_generic, expm1_generic, log_generic, log1p_generic
+from ._utils.custom_ops import exp_generic, log_generic
 
 
 class Logistic(Distribution):
@@ -124,11 +124,11 @@
         self.const = P.ScalarToArray()
         self.dtypeop = P.DType()
         self.exp = exp_generic
-        self.expm1 = expm1_generic
+        self.expm1 = P.Expm1()
         self.fill = P.Fill()
         self.less = P.Less()
         self.log = log_generic
-        self.log1p = log1p_generic
+        self.log1p = P.Log1p()
         self.logicalor = P.LogicalOr()
         self.erf = P.Erf()
         self.greater = P.Greater()
diff --git a/mindspore/nn/probability/distribution/normal.py b/mindspore/nn/probability/distribution/normal.py
index 6a4949084e..a07679a365 100644
--- a/mindspore/nn/probability/distribution/normal.py
+++ b/mindspore/nn/probability/distribution/normal.py
@@ -20,7 +20,7 @@
 from mindspore._checkparam import Validator
 from mindspore.common import dtype as mstype
 from .distribution import Distribution
 from ._utils.utils import check_greater_zero, check_distribution_name
-from ._utils.custom_ops import exp_generic, expm1_generic, log_generic
+from ._utils.custom_ops import exp_generic, log_generic
 
 
 class Normal(Distribution):
@@ -137,7 +137,7 @@
 
         # ops needed for the class
         self.exp = exp_generic
-        self.expm1 = expm1_generic
+        self.expm1 = P.Expm1()
         self.log = log_generic
         self.erf = P.Erf()
         self.squeeze = P.Squeeze(0)
diff --git a/tests/ut/python/nn/probability/distribution/test_lognormal.py b/tests/ut/python/nn/probability/distribution/test_lognormal.py
index f351fa310f..e0ae671cfb 100644
--- a/tests/ut/python/nn/probability/distribution/test_lognormal.py
+++ b/tests/ut/python/nn/probability/distribution/test_lognormal.py
@@ -175,15 +175,16 @@ class LogNormalBasics(nn.Cell):
     def construct(self):
         mean = self.n.mean()
-        sd = self.n.sd()
         mode = self.n.mode()
         entropy = self.n.entropy()
-        return mean + sd + mode + entropy
+        return mean + mode + entropy
 
 
 def test_bascis():
     """
     Test mean/sd/mode/entropy functionality of LogNormal.
     """
+    from mindspore import context
+    context.set_context(device_target="Ascend")
     net = LogNormalBasics()
     ans = net()
     assert isinstance(ans, Tensor)
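For reference, the removed expm1_generic helper simply computed exp_generic(x) - 1.0, and the patch swaps every call site over to MindSpore's dedicated P.Expm1 and P.Log1p primitives. Below is a minimal standalone sketch of the replacement ops in use; it is illustrative only and not part of the patch, and it assumes a MindSpore install whose active backend supports the Expm1 and Log1p kernels:

import numpy as np
from mindspore import Tensor, context
from mindspore.ops import operations as P

# Run eagerly so the primitives can be called outside an nn.Cell.
context.set_context(mode=context.PYNATIVE_MODE)

expm1 = P.Expm1()  # replaces the composed expm1_generic = exp_generic(x) - 1.0
log1p = P.Log1p()  # replaces log1p_generic at the call sites above

x = Tensor(np.array([1e-8, 0.5, 2.0], dtype=np.float32))

# For the 1e-8 entry, exp(x) rounds to exactly 1.0 in float32, so the
# removed exp(x) - 1.0 composition would cancel to 0.0; a dedicated
# expm1 kernel can keep the leading digits.
print(expm1(x))  # roughly [1e-08, 0.6487213, 6.389056]
print(log1p(x))  # roughly [1e-08, 0.4054651, 1.0986123]

That improved accuracy near zero (and symmetrically for log1p) is the usual reason to prefer the fused primitives over hand-composed exp/log, which appears to be the motivation for this change.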