From ab427caf535e768f6d49de8aac994927224a5f3b Mon Sep 17 00:00:00 2001
From: baihuawei
Date: Sun, 27 Sep 2020 09:53:27 +0800
Subject: [PATCH] fix softmax

---
 .../cpu/mkldnn/softmax_cpu_kernel.cc          |  5 +++-
 tests/st/ops/cpu/test_softmax_op.py           | 30 ++++++++++++++++++-
 2 files changed, 33 insertions(+), 2 deletions(-)

diff --git a/mindspore/ccsrc/backend/kernel_compiler/cpu/mkldnn/softmax_cpu_kernel.cc b/mindspore/ccsrc/backend/kernel_compiler/cpu/mkldnn/softmax_cpu_kernel.cc
index de9a8890df..d1f68038b5 100644
--- a/mindspore/ccsrc/backend/kernel_compiler/cpu/mkldnn/softmax_cpu_kernel.cc
+++ b/mindspore/ccsrc/backend/kernel_compiler/cpu/mkldnn/softmax_cpu_kernel.cc
@@ -28,9 +28,12 @@ void SoftmaxCPUKernel::InitKernel(const CNodePtr &kernel_node) {
     MS_LOG(EXCEPTION) << "cpu softmax only support input axis size 1";
   }
   int axis = axis_list[0];
-  if (axis == -1 || axis >= SizeToInt(src_shape.size())) {
+  if (axis >= SizeToInt(src_shape.size())) {
     axis = SizeToInt(src_shape.size()) - 1;
   }
+  while (axis < 0) {
+    axis += SizeToInt(src_shape.size());
+  }
   dnnl::memory::desc src_desc = GetDefaultMemDesc(src_shape);
   dnnl::softmax_forward::desc desc = dnnl::softmax_forward::desc(dnnl::prop_kind::forward_training, src_desc, axis);
   auto prim_desc = dnnl::softmax_forward::primitive_desc(desc, MKLKernelEngine::Get().engine());
diff --git a/tests/st/ops/cpu/test_softmax_op.py b/tests/st/ops/cpu/test_softmax_op.py
index 6562381076..73b9393510 100644
--- a/tests/st/ops/cpu/test_softmax_op.py
+++ b/tests/st/ops/cpu/test_softmax_op.py
@@ -29,7 +29,7 @@ context.set_context(mode=context.GRAPH_MODE, device_target='CPU')
 class NetSoftmax(nn.Cell):
     def __init__(self):
         super(NetSoftmax, self).__init__()
-        self.softmax = P.Softmax()
+        self.softmax = P.Softmax(axis=-1)
         x = Tensor(np.array([[0.1, 0.3, 0.6],
                              [0.2, -0.6, 0.8],
                              [0.6, 1, 0.4]]).astype(np.float32))
@@ -52,3 +52,31 @@ def test_softmax():
     diff = np.abs(outputSum - expect)
     print(diff)
     assert np.all(diff < error)
+
+
+class NetSoftmax1(nn.Cell):
+    def __init__(self):
+        super(NetSoftmax1, self).__init__()
+        self.softmax = P.Softmax(axis=-2)
+        x = Tensor(np.array([[0.1, 0.3, 0.6],
+                             [0.2, -0.6, 0.8],
+                             [0.6, 1, 0.4]]).astype(np.float32))
+        self.x = Parameter(initializer(x, x.shape), name='x')
+
+    def construct(self):
+        return self.softmax(self.x)
+
+
+@pytest.mark.level0
+@pytest.mark.platform_x86_cpu
+@pytest.mark.env_onecard
+def test_softmax1():
+    Softmax = NetSoftmax1()
+    output = Softmax()
+    output = output.asnumpy()
+    outputSum = output.sum(axis=0)
+    expect = np.ones(3)
+    error = expect * 1.0e-6
+    diff = np.abs(outputSum - expect)
+    print(diff)
+    assert np.all(diff < error)