
test_softmax_op.py 2.3 kB

# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import pytest
import numpy as np
from mindspore import Tensor
from mindspore.ops import operations as P
import mindspore.nn as nn
import mindspore.context as context


class NetSoftmax(nn.Cell):
    def __init__(self):
        super(NetSoftmax, self).__init__()
        axis = -2
        # softmax1 uses the default axis (-1), softmax2 normalizes along axis -2.
        self.softmax1 = P.Softmax()
        self.softmax2 = P.Softmax(axis)

    def construct(self, x):
        return self.softmax1(x), self.softmax2(x)


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_softmax():
    x = Tensor(np.array([[0.1, 0.3, 0.6, -0.3],
                         [0.2, -0.6, 0.8, 0.6],
                         [0.6, -1.2, 0.4, 0.6]]).astype(np.float32))
    expect1 = np.ones(3)
    expect2 = np.ones(4)
    error1 = expect1 * 1.0e-6
    error2 = expect2 * 1.0e-6

    # PyNative mode: softmax outputs must sum to 1 along the softmax axis.
    context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU")
    Softmax = NetSoftmax()
    output = Softmax(x)
    outputSum1 = output[0].asnumpy().sum(axis=1)
    outputSum2 = output[1].asnumpy().sum(axis=0)
    diff1 = np.abs(outputSum1 - expect1)
    diff2 = np.abs(outputSum2 - expect2)
    assert np.all(diff1 < error1)
    assert np.all(diff2 < error2)

    # Repeat the same check in Graph mode.
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    Softmax = NetSoftmax()
    output = Softmax(x)
    outputSum1 = output[0].asnumpy().sum(axis=1)
    outputSum2 = output[1].asnumpy().sum(axis=0)
    diff1 = np.abs(outputSum1 - expect1)
    diff2 = np.abs(outputSum2 - expect2)
    assert np.all(diff1 < error1)
    assert np.all(diff2 < error2)
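
A usage sketch, not part of the original file: assuming a GPU-enabled MindSpore installation and pytest, the test can be run directly by file name; the custom marks (level0, platform_x86_gpu_training, env_onecard) are used for test selection in CI and may trigger unknown-mark warnings unless registered in the project's pytest configuration.

pytest -v test_softmax_op.py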