From 1e51414f1b7418ff823ceb8304f1bd70f15558ac Mon Sep 17 00:00:00 2001
From: panbingao
Date: Mon, 29 Jun 2020 11:10:24 +0800
Subject: [PATCH] Adapting operator Softsign in ME

---
 mindspore/ops/_grad/grad_nn_ops.py     | 15 +++++++++++
 mindspore/ops/_op_impl/tbe/__init__.py |  1 +
 mindspore/ops/_op_impl/tbe/softsign.py | 37 ++++++++++++++++++++++++++
 mindspore/ops/operations/__init__.py   |  3 ++-
 mindspore/ops/operations/nn_ops.py     | 35 ++++++++++++++++++++++++
 tests/ut/python/ops/test_ops.py        |  4 +++
 6 files changed, 94 insertions(+), 1 deletion(-)
 create mode 100644 mindspore/ops/_op_impl/tbe/softsign.py

diff --git a/mindspore/ops/_grad/grad_nn_ops.py b/mindspore/ops/_grad/grad_nn_ops.py
index 107de1768c..b34d452cba 100755
--- a/mindspore/ops/_grad/grad_nn_ops.py
+++ b/mindspore/ops/_grad/grad_nn_ops.py
@@ -336,6 +336,21 @@ def get_bprop_softplus(self):
     return bprop
 
 
+@bprop_getters.register(P.Softsign)
+def get_bprop_softsign(self):
+    """Grad definition for `Softsign` operation."""
+    mul = P.Mul()
+    absolute = P.Abs()
+    div = P.Div()
+    square = P.Square()
+
+    def bprop(x, out, dout):
+        dx = mul(dout, div(1, square(1 + absolute(x))))
+        return (dx,)
+
+    return bprop
+
+
 @bprop_getters.register(P.Tanh)
 def get_bprop_tanh(self):
     """Grad definition for `Tanh` operation."""
diff --git a/mindspore/ops/_op_impl/tbe/__init__.py b/mindspore/ops/_op_impl/tbe/__init__.py
index fa2be6d515..a4f8aa42ed 100644
--- a/mindspore/ops/_op_impl/tbe/__init__.py
+++ b/mindspore/ops/_op_impl/tbe/__init__.py
@@ -122,6 +122,7 @@ from .round import _round_tbe
 from .tanh import _tanh_tbe
 from .tanh_grad import _tanh_grad_tbe
 from .softmax import _softmax_tbe
+from .softsign import _softsign_tbe
 from .softplus import _softplus_tbe
 from .softplus_grad import _softplus_grad_tbe
 from .softmax_grad_ext import _softmax_grad_ext_tbe
diff --git a/mindspore/ops/_op_impl/tbe/softsign.py b/mindspore/ops/_op_impl/tbe/softsign.py
new file mode 100644
index 0000000000..9f1609bf0a
--- /dev/null
+++ b/mindspore/ops/_op_impl/tbe/softsign.py
@@ -0,0 +1,37 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+"""Softsign op"""
+from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
+
+softsign_op_info = TBERegOp("Softsign") \
+    .fusion_type("OPAQUE") \
+    .async_flag(False) \
+    .binfile_name("softsign.so") \
+    .compute_cost(10) \
+    .kernel_name("softsign") \
+    .partial_flag(True) \
+    .op_pattern("formatAgnostic") \
+    .input(0, "x", False, "required", "all") \
+    .output(0, "y", False, "required", "all") \
+    .dtype_format(DataType.F16_Default, DataType.F16_Default) \
+    .dtype_format(DataType.F32_Default, DataType.F32_Default) \
+    .get_op_info()
+
+
+@op_info_register(softsign_op_info)
+def _softsign_tbe():
+    """Softsign TBE register"""
+    return
diff --git a/mindspore/ops/operations/__init__.py b/mindspore/ops/operations/__init__.py
index 06a19d2db7..b2d0fc7382 100644
--- a/mindspore/ops/operations/__init__.py
+++ b/mindspore/ops/operations/__init__.py
@@ -68,7 +68,7 @@ from .nn_ops import (LSTM, SGD, Adam, SparseApplyAdam, SparseApplyLazyAdam, Appl
                      MaxPoolWithArgmax, OneHot, Pad, MirrorPad, PReLU, ReLU, ReLU6, ReLUV2,
                      HSwish, HSigmoid, ResizeBilinear, Sigmoid,
                      SigmoidCrossEntropyWithLogits,
-                     SmoothL1Loss, Softmax, Softplus, LRN,
+                     SmoothL1Loss, Softmax, Softsign, Softplus, LRN,
                      SoftmaxCrossEntropyWithLogits, ROIAlign,
                      SparseSoftmaxCrossEntropyWithLogits, Tanh,
                      TopK, BinaryCrossEntropy, SparseApplyAdagrad, LARSUpdate, ApplyFtrl, SparseApplyFtrl,
@@ -115,6 +115,7 @@ __all__ = [
     'SparseApplyLazyAdam',
     'Softplus',
     'Softmax',
+    'Softsign',
     'LogSoftmax',
     'SoftmaxCrossEntropyWithLogits',
     'ROIAlign',
diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py
index 6320e9e011..a1f5887217 100644
--- a/mindspore/ops/operations/nn_ops.py
+++ b/mindspore/ops/operations/nn_ops.py
@@ -224,6 +224,41 @@ class Softplus(PrimitiveWithInfer):
         return input_x
 
 
+class Softsign(PrimitiveWithInfer):
+    r"""
+    Softsign activation function.
+
+    The function is shown as follows:
+
+    .. math::
+        \text{output} = \frac{\text{input_x}}{1 + \left| \text{input_x} \right|}
+
+    Inputs:
+        - **input_x** (Tensor) - The input tensor whose data type should be float.
+
+    Outputs:
+        Tensor, with the same type and shape as `input_x`.
+
+    Examples:
+        >>> input_x = Tensor(np.array([0, -1, 2, 30, -30]), mindspore.float32)
+        >>> softsign = P.Softsign()
+        >>> softsign(input_x)
+        [0. -0.5 0.6666667 0.9677419 -0.9677419]
+    """
+
+    @prim_attr_register
+    def __init__(self):
+        """init Softsign"""
+        self.init_prim_io_names(inputs=['x'], outputs=['output'])
+
+    def infer_shape(self, input_x):
+        return input_x
+
+    def infer_dtype(self, input_x):
+        validator.check_tensor_type_same({'input_x': input_x}, mstype.float_type, self.name)
+        return input_x
+
+
 class ReLU(PrimitiveWithInfer):
     r"""
     Computes ReLU(Rectified Linear Unit) of input tensor element-wise.
diff --git a/tests/ut/python/ops/test_ops.py b/tests/ut/python/ops/test_ops.py
index f55d42e28b..a99b231fa7 100755
--- a/tests/ut/python/ops/test_ops.py
+++ b/tests/ut/python/ops/test_ops.py
@@ -1376,6 +1376,10 @@ test_case_nn_ops = [
         'block': P.Softmax(),
         'desc_inputs': [[5, 5]],
         'desc_bprop': [[5, 5]]}),
+    ('Softsign', {
+        'block': P.Softsign(),
+        'desc_inputs': [[5, 5]],
+        'desc_bprop': [[5, 5]]}),
     ('DepthwiseConv2dNative_1', {
         'block': P.DepthwiseConv2dNative(3, (3, 3), pad_mode="pad", pad=1, stride=2),
         'desc_inputs': [[10, 32, 32, 32], [1, 32, 3, 3]],
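
Note (illustration only, not part of the patch): a minimal NumPy sketch to sanity-check the forward values shown in the Softsign docstring example and the derivative 1 / (1 + |x|)^2 that get_bprop_softsign computes. It assumes plain NumPy rather than the MindSpore primitives registered above.

import numpy as np

x = np.array([0.0, -1.0, 2.0, 30.0, -30.0], dtype=np.float32)

# Forward: softsign(x) = x / (1 + |x|)
y = x / (1.0 + np.abs(x))
print(y)   # approx. [0., -0.5, 0.6666667, 0.9677419, -0.9677419]

# Backward: d/dx softsign(x) = 1 / (1 + |x|)^2, the expression used in get_bprop_softsign
dout = np.ones_like(x)                 # upstream gradient of ones
dx = dout / np.square(1.0 + np.abs(x))
print(dx)  # approx. [1., 0.25, 0.1111, 0.00104, 0.00104]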