You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

test_graph_fallback.py 6.3 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257
  1. # Copyright 2021 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. """ test graph fallback """
  16. import pytest
  17. import numpy as np
  18. import mindspore.nn as nn
  19. from mindspore import Tensor, ms_function, context
  20. from mindspore.ops import operations as P
  21. from mindspore.ops import functional as F
  22. import mindspore.common.dtype as mstype
  23. import mindspore.common._monad as monad
  24. context.set_context(mode=context.GRAPH_MODE)
  25. # `add_func` is defined in current file.
  26. def add_func(x, y):
  27. return x + y
  28. @ms_function
  29. def do_increment(i):
  30. add_1 = F.partial(add_func, 1)
  31. return add_1(i)
  32. def test_increment():
  33. a = do_increment(9)
  34. assert a == 10
  35. @ms_function
  36. def use_monad(x, y):
  37. res = P.Mul()(x, y)
  38. res = F.depend(res, monad.U)
  39. return res
  40. def test_use_monad():
  41. x = Tensor(1.0, mstype.float32)
  42. y = Tensor(1.0, mstype.float32)
  43. print(use_monad(x, y))
  44. @ms_function
  45. def use_tensor_with_mstype():
  46. me_x = Tensor(1, mstype.int32)
  47. return me_x
  48. def test_tensor_with_mstype():
  49. """
  50. Feature: JIT Fallback
  51. Description: Test tensor with mstype in graph mode.
  52. Expectation: No exception.
  53. """
  54. print(use_tensor_with_mstype())
  55. @ms_function
  56. def use_tuple_of_tensor():
  57. me_x = (Tensor(1), Tensor(1))
  58. return me_x
  59. @pytest.mark.skip(reason='Not support graph fallback feature yet')
  60. def test_tuple_of_tensor():
  61. """
  62. Feature: JIT Fallback
  63. Description: Test tuple of tensor in graph mode.
  64. Expectation: No exception.
  65. """
  66. print(use_tuple_of_tensor())
class Net(nn.Cell):
    """Cell exercising the builtin len() on a Tensor attribute in graph mode."""

    def __init__(self):
        super(Net, self).__init__()
        # Constant tensor; its length drives the loop in construct.
        self.x = Tensor([2, 3, 4])

    def construct(self):
        # len() on a Tensor is resolved by the graph compiler / fallback.
        x_len = len(self.x)
        for i in range(x_len):
            print(i)
        return x_len
  76. def test_builtins_len():
  77. net = Net()
  78. net()
  79. @ms_function
  80. def np_fallback_func():
  81. array_x = tuple([2, 3, 4, 5])
  82. np_x = np.array(array_x).astype(np.float32)
  83. me_x = Tensor(np_x)
  84. me_x = me_x + me_x
  85. return me_x
  86. def test_np_fallback_func():
  87. print(np_fallback_func())
  88. # Test `return` interpret node.
  89. @ms_function
  90. def div_mod_func1():
  91. x = 8
  92. y = 3
  93. a = divmod(x, y)
  94. return Tensor(a)
  95. def test_div_mod_func1():
  96. print(div_mod_func1()) # (2, 2)
  97. # Test interpret node with parameters as input.
  98. @ms_function
  99. def div_mod_func2(x, y):
  100. a = divmod(x, y)
  101. return Tensor(a)
  102. def test_div_mod_func2_scalar():
  103. """
  104. Feature: JIT Fallback
  105. Description: Test divmod in graph.
  106. Expectation: No exception.
  107. """
  108. print(div_mod_func2(8, 3)) # (2, 2)
  109. @pytest.mark.skip(reason='Not support in graph jit fallback feature yet')
  110. def test_div_mod_func2_tensor():
  111. """
  112. Feature: JIT Fallback
  113. Description: Test divmod with Tensor input in graph. We'll support it in Tensor Input Fallback solution.
  114. Expectation: Not supported exception.
  115. """
  116. with pytest.raises(RuntimeError) as err:
  117. print(div_mod_func2(Tensor(8), Tensor(3)))
  118. assert "Not support Tensor or variable type as input during running JIT Fallback, but got" in str(err.value)
# NameError: name 'Tensor' is not defined.
@ms_function
def select_func(cond, x, y):
    """Select between x and y depending on the type of cond."""
    # Sequence condition: always take y.
    if isinstance(cond, (tuple, list)):
        output = y
    # Tensor condition: elementwise select.
    elif isinstance(cond, Tensor):
        output = F.select(cond, x, y)
    else:
        output = x
    return output
  129. def test_select_func():
  130. cond = Tensor([True, False])
  131. x = Tensor([2, 3], mstype.float32)
  132. y = Tensor([1, 2], mstype.float32)
  133. print(select_func(cond, x, y))
# Not interpret 'Tensor'.
@ms_function
def select_func2(cond, x, y):
    """Variant of select_func with two independent if statements."""
    if isinstance(cond, (tuple, list)):
        output = y
    # NOTE: deliberately a separate `if` (not elif) — the second branch can
    # overwrite the first assignment; this shape is the point of the test.
    if isinstance(cond, Tensor):
        output = F.select(cond, x, y)
    else:
        output = x
    return output
  144. def test_select_func2():
  145. cond = Tensor([True, False])
  146. x = Tensor([2, 3], mstype.float32)
  147. y = Tensor([1, 2], mstype.float32)
  148. print(select_func2(cond, x, y))
# NameError: name 'Tensor' is not defined.
@ms_function
def slice_func(a, b):
    """Assign b into rows 1..2 of a via slice assignment, then return a."""
    a[1:3, ::] = b
    return a
  154. def test_slice_func():
  155. a = Tensor(np.arange(60).reshape(3, 4, 5), dtype=mstype.float32)
  156. b = Tensor([1], dtype=mstype.float32)
  157. print(slice_func(a, b))
  158. @ms_function
  159. def np_fallback_func_tensor_index(x):
  160. array_x = tuple([2, 3, 4, 5])
  161. np_x = np.array(array_x).astype(np.float32)
  162. me_x = Tensor(np_x)
  163. me_x = me_x + me_x
  164. return me_x[x]
  165. # NameError: name 'array_x' is not defined.
  166. @pytest.mark.skip(reason='Not support graph fallback feature yet')
  167. def test_np_fallback_func_tensor_index():
  168. """
  169. Feature: Fallback feature: support Tensor index.
  170. Description: Fallback feature: support Tensor index.
  171. Expectation: Fallback feature: support Tensor index.
  172. """
  173. x = Tensor(1, mstype.int32)
  174. output = np_fallback_func_tensor_index(x)
  175. output_expect = Tensor(6, mstype.float32)
  176. assert output == output_expect
  177. # EvalCNode: This may be not defined, or it can't be a operator.
  178. @pytest.mark.skip(reason='Not support graph fallback feature yet')
  179. def test_np_tensor_add():
  180. """
  181. Feature: Fallback feature
  182. Description: support Tensor add.
  183. Expectation: No exception.
  184. """
  185. @ms_function
  186. def np_tensor_add():
  187. a = Tensor(np.array(4))
  188. b = Tensor(np.array(5))
  189. tensor_list = [a, b]
  190. for tensor in tensor_list:
  191. print(tensor)
  192. x = 6
  193. np_x = np.array(x)
  194. c = Tensor(np_x)
  195. d = tensor_list[-1] + c
  196. tensor_list.append(d)
  197. return tensor_list
  198. tensor_list = np_tensor_add()
  199. print("tensor_list:", tensor_list)
  200. assert tensor_list[-1] == 11