From cdb7ec937b0ef62b9cf320ee8172a6a7dba5a86d Mon Sep 17 00:00:00 2001
From: zhaojichen
Date: Wed, 3 Jun 2020 02:44:25 -0400
Subject: [PATCH] add Inv, InvGrad and Invert ops for VM

---
 mindspore/ccsrc/kernel/tbe/tbe_adapter.cc |  1 +
 mindspore/ops/_grad/grad_math_ops.py      | 11 +++++
 mindspore/ops/_op_impl/tbe/__init__.py    |  3 ++
 mindspore/ops/_op_impl/tbe/inv.py         | 39 +++++++++++++++
 mindspore/ops/_op_impl/tbe/inv_grad.py    | 39 +++++++++++++++
 mindspore/ops/_op_impl/tbe/invert.py      | 36 ++++++++++++++
 mindspore/ops/operations/__init__.py      |  4 +-
 mindspore/ops/operations/_grad_ops.py     | 17 +++++++
 mindspore/ops/operations/math_ops.py      | 60 +++++++++++++++++++++++
 tests/ut/python/ops/test_ops.py           |  9 ++++
 10 files changed, 218 insertions(+), 1 deletion(-)
 create mode 100644 mindspore/ops/_op_impl/tbe/inv.py
 create mode 100644 mindspore/ops/_op_impl/tbe/inv_grad.py
 create mode 100644 mindspore/ops/_op_impl/tbe/invert.py

diff --git a/mindspore/ccsrc/kernel/tbe/tbe_adapter.cc b/mindspore/ccsrc/kernel/tbe/tbe_adapter.cc
index 2af70bd44b..c912de10b3 100644
--- a/mindspore/ccsrc/kernel/tbe/tbe_adapter.cc
+++ b/mindspore/ccsrc/kernel/tbe/tbe_adapter.cc
@@ -98,6 +98,7 @@ static std::map tbe_func_adapter_map = {
   {"n_ms_with_mask", "nms_with_mask"},
   {"square_sum_all", "square_sum_all"},
   {"cum_sum", "cumsum_d"},
+  {"inv_grad", "inv_grad"},
   {"apply_rms_prop", "apply_rms_prop_d"},
   {"cum_prod", "cumprod_d"},
   {"reduce_all", "reduce_all_d"},
diff --git a/mindspore/ops/_grad/grad_math_ops.py b/mindspore/ops/_grad/grad_math_ops.py
index 6a484ae012..ba9973d81b 100755
--- a/mindspore/ops/_grad/grad_math_ops.py
+++ b/mindspore/ops/_grad/grad_math_ops.py
@@ -1025,3 +1025,14 @@ def get_bprop_atanh(self):
         dx = div(1, tmp) * dout
         return (dx,)
     return bprop
+
+
+@bprop_getters.register(P.Inv)
+def get_bprop_inv(self):
+    """Grad definition for `Inv` operation."""
+    inv_grad = G.InvGrad()
+
+    def bprop(x, out, dout):
+        dx = inv_grad(x, dout)
+        return (dx,)
+    return bprop
diff --git a/mindspore/ops/_op_impl/tbe/__init__.py b/mindspore/ops/_op_impl/tbe/__init__.py
index 12e95cf781..dd1fcb3177 100644
--- a/mindspore/ops/_op_impl/tbe/__init__.py
+++ b/mindspore/ops/_op_impl/tbe/__init__.py
@@ -233,6 +233,9 @@ from .atan_grad import _atan_grad_tbe
 from .atanh import _atanh_tbe
 from .cosh import _cosh_tbe
 from .sinh import _sinh_tbe
+from .inv import _inv_tbe
+from .inv_grad import _inv_grad_tbe
+from .invert import _invert_tbe
 from .basic_lstm_cell import _basic_lstm_cell_tbe
 from .basic_lstm_cell_c_state_grad import _basic_lstm_cell_c_state_grad_tbe
 from .basic_lstm_cell_weight_grad import _basic_lstm_cell_weight_grad_tbe
diff --git a/mindspore/ops/_op_impl/tbe/inv.py b/mindspore/ops/_op_impl/tbe/inv.py
new file mode 100644
index 0000000000..32115b154f
--- /dev/null
+++ b/mindspore/ops/_op_impl/tbe/inv.py
@@ -0,0 +1,39 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+"""Inv op"""
+from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
+
+inv_op_info = TBERegOp("Inv") \
+    .fusion_type("OPAQUE") \
+    .async_flag(False) \
+    .binfile_name("inv.so") \
+    .compute_cost(10) \
+    .kernel_name("inv") \
+    .partial_flag(True) \
+    .input(0, "x", False, "required", "all") \
+    .output(0, "y", False, "required", "all") \
+    .dtype_format(DataType.I32_Default, DataType.I32_Default) \
+    .dtype_format(DataType.F32_Default, DataType.F32_Default) \
+    .dtype_format(DataType.F16_Default, DataType.F16_Default) \
+    .dtype_format(DataType.I8_Default, DataType.I8_Default) \
+    .dtype_format(DataType.U8_Default, DataType.U8_Default) \
+    .get_op_info()
+
+
+@op_info_register(inv_op_info)
+def _inv_tbe():
+    """Inv TBE register"""
+    return
diff --git a/mindspore/ops/_op_impl/tbe/inv_grad.py b/mindspore/ops/_op_impl/tbe/inv_grad.py
new file mode 100644
index 0000000000..70626b8808
--- /dev/null
+++ b/mindspore/ops/_op_impl/tbe/inv_grad.py
@@ -0,0 +1,39 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+"""InvGrad op"""
+from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
+
+inv_grad_op_info = TBERegOp("InvGrad") \
+    .fusion_type("ELEMWISE") \
+    .async_flag(False) \
+    .binfile_name("inv_grad.so") \
+    .compute_cost(10) \
+    .kernel_name("inv_grad") \
+    .partial_flag(True) \
+    .input(0, "x", False, "required", "all") \
+    .input(1, "grad", False, "required", "all") \
+    .output(0, "y", False, "required", "all") \
+    .dtype_format(DataType.F16_Default, DataType.F16_Default, DataType.F16_Default) \
+    .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default) \
+    .dtype_format(DataType.I32_Default, DataType.I32_Default, DataType.I32_Default) \
+    .dtype_format(DataType.I8_Default, DataType.I8_Default, DataType.I8_Default) \
+    .get_op_info()
+
+
+@op_info_register(inv_grad_op_info)
+def _inv_grad_tbe():
+    """InvGrad TBE register"""
+    return
diff --git a/mindspore/ops/_op_impl/tbe/invert.py b/mindspore/ops/_op_impl/tbe/invert.py
new file mode 100644
index 0000000000..887eee45e7
--- /dev/null
+++ b/mindspore/ops/_op_impl/tbe/invert.py
@@ -0,0 +1,36 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+"""Invert op"""
+from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
+
+invert_op_info = TBERegOp("Invert") \
+    .fusion_type("OPAQUE") \
+    .async_flag(False) \
+    .binfile_name("invert.so") \
+    .compute_cost(10) \
+    .kernel_name("invert") \
+    .partial_flag(True) \
+    .input(0, "x", False, "required", "all") \
+    .output(0, "y", False, "required", "all") \
+    .dtype_format(DataType.I16_Default, DataType.I16_Default) \
+    .dtype_format(DataType.U16_Default, DataType.U16_Default) \
+    .get_op_info()
+
+
+@op_info_register(invert_op_info)
+def _invert_tbe():
+    """Invert TBE register"""
+    return
diff --git a/mindspore/ops/operations/__init__.py b/mindspore/ops/operations/__init__.py
index 15e33c6823..dc1a7182de 100644
--- a/mindspore/ops/operations/__init__.py
+++ b/mindspore/ops/operations/__init__.py
@@ -41,7 +41,7 @@
 from .control_ops import ControlDepend, GeSwitch, Merge
 from .inner_ops import ScalarCast
 from .math_ops import (Abs, ACos, Asin, Asinh, AddN, AssignAdd, AssignSub, Atan2, BatchMatMul, BitwiseAnd, BitwiseOr,
-                       BitwiseXor,
+                       BitwiseXor, Inv, Invert,
                        ReduceMax, ReduceMin, ReduceMean, ReduceSum, ReduceAll, ReduceProd, CumProd, Cos, Div, DivNoNan,
                        Equal, EqualCount, Exp, Expm1, Erf, Erfc, Floor, FloorDiv, FloorMod, Ceil, Acosh, Greater,
                        GreaterEqual, Less, LessEqual, Log, Log1p, LogicalAnd,
@@ -141,6 +141,8 @@ __all__ = [
     'RealDiv',
     'Div',
     'DivNoNan',
+    'Inv',
+    'Invert',
     'TruncatedNormal',
     'Fill',
     'OnesLike',
diff --git a/mindspore/ops/operations/_grad_ops.py b/mindspore/ops/operations/_grad_ops.py
index 008f5f0edb..2f8bcbe289 100644
--- a/mindspore/ops/operations/_grad_ops.py
+++ b/mindspore/ops/operations/_grad_ops.py
@@ -1276,3 +1276,20 @@ class BasicLSTMCellInputGrad(PrimitiveWithInfer):
         validator.check_type_name("dgate", dgate_dtype, [mstype.float16, mstype.float32], self.name)
         validator.check_type_name("w", w_dtype, [mstype.float16, mstype.float32], self.name)
         return (dgate_dtype, dgate_dtype)
+
+
+class InvGrad(PrimitiveWithInfer):
+    """Computes gradients for the Inv operation."""
+
+    @prim_attr_register
+    def __init__(self):
+        pass
+
+    def infer_shape(self, x, grad):
+        validator.check("x_shape", x, "grad_shape", grad, Rel.EQ, self.name)
+        return x
+
+    def infer_dtype(self, x, grad):
+        validator.check_type_name("x", x, [mstype.float16, mstype.float32, mstype.int32, mstype.int8], self.name)
+        validator.check_type_name("grad", grad, [mstype.float16, mstype.float32, mstype.int32, mstype.int8], self.name)
+        return x
diff --git a/mindspore/ops/operations/math_ops.py b/mindspore/ops/operations/math_ops.py
index 8322ae2007..af7d7c0a26 100644
--- a/mindspore/ops/operations/math_ops.py
+++ b/mindspore/ops/operations/math_ops.py
@@ -2597,3 +2597,63 @@ class BesselI1e(PrimitiveWithInfer):
     def infer_dtype(self, x):
         validator.check_tensor_type_same({'x': x}, mstype.number_type, self.name)
         return x
+
+
+class Inv(PrimitiveWithInfer):
+    """
+    Computes Inv (the reciprocal) of the input tensor element-wise.
+
+    Inputs:
+        - **input_x** (Tensor) - The shape of tensor is :math:`(x_1, x_2, ..., x_R)`.
+
+    Outputs:
+        Tensor, has the same shape as `input_x`.
+
+    Examples:
+        >>> inv = P.Inv()
+        >>> input_x = Tensor(np.array([0.25, 0.4, 0.31, 0.52]), mindspore.float32)
+        >>> output = inv(input_x)
+        [4., 2.5, 3.2258065, 1.923077]
+    """
+
+    @prim_attr_register
+    def __init__(self):
+        pass
+
+    def infer_shape(self, x_shape):
+        return x_shape
+
+    def infer_dtype(self, x_dtype):
+        validator.check_tensor_type_same({'x_dtype': x_dtype}, [mstype.float16, mstype.float32,
+                                                                mstype.int32, mstype.int8,
+                                                                mstype.uint8], self.name)
+        return x_dtype
+
+
+class Invert(PrimitiveWithInfer):
+    """
+    Flips all bits of the input tensor element-wise.
+
+    Inputs:
+        - **input_x** (Tensor[int16], Tensor[uint16]) - The shape of tensor is :math:`(x_1, x_2, ..., x_R)`.
+
+    Outputs:
+        Tensor, has the same shape as `input_x`.
+
+    Examples:
+        >>> invert = P.Invert()
+        >>> input_x = Tensor(np.array([25, 4, 13, 9]), mindspore.int16)
+        >>> output = invert(input_x)
+        [-26, -5, -14, -10]
+    """
+
+    @prim_attr_register
+    def __init__(self):
+        pass
+
+    def infer_shape(self, x_shape):
+        return x_shape
+
+    def infer_dtype(self, x_dtype):
+        validator.check_tensor_type_same({'x_dtype': x_dtype}, [mstype.int16, mstype.uint16], self.name)
+        return x_dtype
diff --git a/tests/ut/python/ops/test_ops.py b/tests/ut/python/ops/test_ops.py
index f9b7ee6483..a1bccdcc44 100755
--- a/tests/ut/python/ops/test_ops.py
+++ b/tests/ut/python/ops/test_ops.py
@@ -750,6 +750,15 @@ test_case_math_ops = [
     ('Sinh', {
         'block': P.Sinh(),
         'desc_inputs': [[3, 4, 5]],
         'desc_bprop': [[3, 4, 5]]}),
+    ('Inv', {
+        'block': P.Inv(),
+        'desc_inputs': [[21, 9, 12, 5]],
+        'desc_bprop': [[21, 9, 12, 5]]}),
+    ('Invert', {
+        'block': P.Invert(),
+        'desc_inputs': [Tensor(np.array([[24, 4, 13, 9], [1, 5, 10, 8]]).astype(np.int16))],
+        'desc_bprop': [],
+        'skip': ['backward']}),
 ]
 test_case_nn_ops = [
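
Usage sketch (not part of the patch): a minimal check of the new primitives, assuming an Ascend device target and the 0.x-era C.GradOperation(name, get_all=...) signature; the wrapper class InvNet is illustrative only. Inv and Invert are exercised forward, while InvGrad is reached indirectly through the bprop registered above.

    import numpy as np
    import mindspore
    import mindspore.nn as nn
    from mindspore import Tensor, context
    from mindspore.ops import composite as C
    from mindspore.ops import operations as P

    # Assumption: an Ascend backend is available for the TBE kernels.
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


    class InvNet(nn.Cell):
        """Illustrative wrapper so GradOperation can differentiate P.Inv."""
        def __init__(self):
            super(InvNet, self).__init__()
            self.inv = P.Inv()

        def construct(self, x):
            return self.inv(x)


    net = InvNet()
    x = Tensor(np.array([0.25, 0.4, 0.31, 0.52]), mindspore.float32)
    print(net(x))  # forward: [4., 2.5, 3.2258065, 1.923077]

    # Backward runs the new bprop, which dispatches to G.InvGrad;
    # mathematically d(1/x)/dx = -1/x**2 element-wise.
    grad_all = C.GradOperation('get_all', get_all=True)
    print(grad_all(net)(x))

    # Invert is forward-only: bitwise NOT on int16/uint16 inputs.
    invert = P.Invert()
    print(invert(Tensor(np.array([25, 4, 13, 9]), mindspore.int16)))  # [-26, -5, -14, -10]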