
test_python_pass.py

# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np

import mindspore
import mindspore.nn as nn
from mindspore import context
from mindspore.common.tensor import Tensor
from mindspore.ops import operations as P
from mindspore.common.python_pass_register import registe_pass, PyPassManager
from mindspore.common.api import _generate_pip_args
from mindspore._c_expression import generate_key, Executor_
from mindspore.common.graph_pattern import IsIn, IsPrimTypeOf, CallWith, IsNot, AnyPattern, NewTensor

context.set_context(mode=context.GRAPH_MODE)
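

# Helper: compile a cell with the given inputs and return the resulting
# function graph, so the effect of a registered python pass can be inspected.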
def get_func_graph(obj, *args, phase="validate"):
    args_names, args_list = _generate_pip_args(obj, *args)
    dic = dict(zip(args_names, args_list))
    key = generate_key(phase, dic)
    phase_prefix = str(key[1])
    if phase == 'export':
        phase = phase + '.' + phase_prefix + '.' + str(obj.create_time)
    else:
        phase = phase_prefix + phase + '.' + str(obj.create_time)
    _executor = Executor_.get_instance()
    _executor.compile(obj, args_list, phase, False)
    return _executor.get_func_graph(phase)
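

# Each test below registers a python pass (a function returning a
# (pattern, target) pair), compiles a small model to trigger the rewrite,
# checks the transformed graph's string form, then unregisters the pass
# through PyPassManager.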
def test_softmax_relu():
    """
    Use a python pass to transform Softmax into ReLU.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def softmax_relu_pass():
        x = AnyPattern()
        softmax_pattern = IsPrimTypeOf(P.Softmax())
        pattern = CallWith(softmax_pattern, inputs=[x])
        relu_pattern = IsPrimTypeOf(P.ReLU(), should_replace=False)
        target = CallWith(relu_pattern, inputs=[x])
        return pattern, target

    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(2)
    ppm = PyPassManager()
    ppm.unregiste(softmax_relu_pass)
    assert "ReLU" in transformed_repr
    assert "Softmax" not in transformed_repr


def test_isin_pattern():
    """
    Test the IsIn pattern, which expresses one-of semantics over sub-patterns.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def softmax_relu_pass():
        x = AnyPattern()
        softmax_pattern = IsPrimTypeOf(P.Softmax())
        call_softmax = CallWith(softmax_pattern, inputs=[x])
        relu_pattern = IsPrimTypeOf(P.ReLU())
        call_relu = CallWith(relu_pattern, inputs=[x])
        pattern = IsIn([call_softmax, call_relu])
        relu6_pattern = IsPrimTypeOf(P.ReLU6(), should_replace=False)
        target = CallWith(relu6_pattern, inputs=[x])
        return pattern, target

    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(2)
    ppm = PyPassManager()
    ppm.unregiste(softmax_relu_pass)
    assert "ReLU6" in transformed_repr
    assert "Softmax" not in transformed_repr


def test_isnot_pattern_0():
    """
    Test the IsNot pattern, which expresses negation over a sub-pattern.
    Case: the IsNot pass fails to match (the BatchNorm input IS a Conv2D),
    so bn_pass applies instead.
    """
    class ConvBN(nn.Cell):
        def __init__(self):
            super(ConvBN, self).__init__()
            self.conv = P.Conv2D(32, 3)
            self.conv_weight = Tensor(np.ones([32, 32, 3, 3]), mindspore.float32)
            self.scale = Tensor(np.ones([32]), mindspore.float32)
            self.bias = Tensor(np.ones([32]), mindspore.float32)
            self.mean = Tensor(np.ones([32]), mindspore.float32)
            self.variance = Tensor(np.ones([32]), mindspore.float32)
            self.bn = P.BatchNorm()

        def construct(self, x):
            x = self.conv(x, self.conv_weight)
            x = self.bn(x, self.scale, self.bias, self.mean, self.variance)
            return x

    inputs = Tensor(np.random.normal(0, 1, (10, 32, 32, 32)), mindspore.float32)
    conv_bn_model = ConvBN()

    @registe_pass(run_only_once=True)
    def single_bn_pass():
        """
        Replace a BatchNorm whose input is NOT a Conv2D with ReLU6.
        """
        conv2d_prim = IsPrimTypeOf("Conv2D")
        conv2d = CallWith(conv2d_prim)
        pattern_0 = IsNot(conv2d)
        pattern = CallWith(P.BatchNorm(), inputs=[pattern_0])
        target = CallWith(P.ReLU6(), inputs=[pattern_0])
        return pattern, target

    @registe_pass(run_only_once=True)
    def bn_pass():
        """
        Replace any BatchNorm with Softmax.
        """
        bn = P.BatchNorm()
        pattern = CallWith(bn)
        softmax = P.Softmax()
        target = CallWith(softmax, should_replace=False)
        return pattern, target

    transformed_repr = get_func_graph(conv_bn_model, inputs).get_return().expanded_str(5)
    ppm = PyPassManager()
    ppm.unregiste(single_bn_pass)
    ppm.unregiste(bn_pass)
    assert "ReLU6" not in transformed_repr
    assert "Softmax" in transformed_repr


def test_isnot_pattern_1():
    """
    Test the IsNot pattern, which expresses negation over a sub-pattern.
    Case: the IsNot pattern matches the graph, so the pass applies.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def single_bn_pass():
        """
        Replace a Softmax whose input is NOT a MatMul with ReLU6.
        """
        matmul = IsPrimTypeOf("MatMul")
        pattern_0 = IsNot(matmul)
        softmax = P.Softmax()
        pattern = CallWith(softmax, inputs=[pattern_0])
        relu6 = P.ReLU6()
        target = CallWith(relu6, inputs=[pattern_0], should_replace=False)
        return pattern, target

    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(5)
    ppm = PyPassManager()
    ppm.unregiste(single_bn_pass)
    assert "ReLU6" in transformed_repr
    assert "Softmax" not in transformed_repr


def test_newtensor_pattern():
    """
    Test the NewTensor pattern, which injects a new weight tensor into the target.
    """
    inputs = Tensor(np.ones([42]), mindspore.float16)
    softmax_model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def softmax_addn_pass():
        x = AnyPattern()
        softmax = P.Softmax()
        pattern = CallWith(softmax, inputs=[x])

        weight_tensor = Tensor(np.zeros([42]), mindspore.float16)
        new_weight = NewTensor(weight_tensor)
        addn_ops = P.AddN()
        target = CallWith(addn_ops, inputs=[x, new_weight], should_replace=False)
        return pattern, target

    transformed_repr = get_func_graph(softmax_model, inputs).get_return().expanded_str(2)
    ppm = PyPassManager()
    ppm.unregiste(softmax_addn_pass)
    assert "AddN" in transformed_repr
    assert "Softmax" not in transformed_repr
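

# For reference, the pattern classes above compose. A minimal sketch combining
# IsIn and NewTensor, assuming they interoperate the same way as in the tests
# above; the pass and test names here are illustrative additions:
def test_isin_newtensor_combined():
    inputs = Tensor(np.ones([42]), mindspore.float16)
    model = nn.Softmax()

    @registe_pass(run_only_once=True)
    def oneof_to_addn_pass():
        x = AnyPattern()
        # Match either a Softmax call or a ReLU call on x.
        call_softmax = CallWith(IsPrimTypeOf(P.Softmax()), inputs=[x])
        call_relu = CallWith(IsPrimTypeOf(P.ReLU()), inputs=[x])
        pattern = IsIn([call_softmax, call_relu])
        # Rewrite the match to AddN(x, zeros), injecting a fresh tensor.
        new_weight = NewTensor(Tensor(np.zeros([42]), mindspore.float16))
        target = CallWith(P.AddN(), inputs=[x, new_weight], should_replace=False)
        return pattern, target

    transformed_repr = get_func_graph(model, inputs).get_return().expanded_str(2)
    PyPassManager().unregiste(oneof_to_addn_pass)
    assert "AddN" in transformed_repr
    assert "Softmax" not in transformed_repr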