
test_elu_op.py

# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P


class NetElu(nn.Cell):
    def __init__(self):
        super(NetElu, self).__init__()
        self.elu = P.Elu()

    def construct(self, x):
        return self.elu(x)
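# NetElu (above) is a thin Cell wrapper around the P.Elu primitive. Since no
# alpha argument is passed, the primitive's default alpha of 1.0 applies, so the
# op computes x for x > 0 and exp(x) - 1 for x <= 0.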
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_elu_fp16():
    x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]).astype(np.float16))
    expect = np.array([[-0.632, 4.0, -0.999], [2.0, -0.993, 9.0]]).astype(np.float16)
    error = np.ones(shape=[2, 3]) * 1.0e-6

    context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU")
    elu = NetElu()
    output = elu(x)
    diff = output.asnumpy() - expect
    assert np.all(diff < error)

    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    elu = NetElu()
    output = elu(x)
    diff = output.asnumpy() - expect
    assert np.all(diff < error)


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_elu_fp32():
    x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]).astype(np.float32))
    expect = np.array([[-0.632, 4.0, -0.999], [2.0, -0.993, 9.0]]).astype(np.float32)
    error = np.ones(shape=[2, 3]) * 1.0e-6

    context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU")
    elu = NetElu()
    output = elu(x)
    diff = output.asnumpy() - expect
    assert np.all(diff < error)

    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    elu = NetElu()
    output = elu(x)
    diff = output.asnumpy() - expect
    assert np.all(diff < error)
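# --- Illustrative sketch (not part of the original test file) ---
# The hard-coded `expect` arrays above are consistent with the standard ELU
# definition at alpha = 1.0: elu(x) = x for x > 0 and exp(x) - 1 otherwise.
# Assuming that definition, a plain NumPy reference can be reproduced like this:
#
#     import numpy as np
#     x = np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]])
#     reference = np.where(x > 0.0, x, np.exp(x) - 1.0)
#     # reference is roughly [[-0.6321, 4.0, -0.9997], [2.0, -0.9933, 9.0]],
#     # which matches the rounded `expect` values used in both tests.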