You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

test_sparse_gather_v2.py 8.2 kB

5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225
  1. # Copyright 2019 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. import numpy as np
  16. import mindspore as ms
  17. import mindspore.nn as nn
  18. from mindspore import Tensor
  19. from mindspore import context
  20. from mindspore.common.api import _cell_graph_executor
  21. from mindspore.ops import composite as C
  22. from mindspore.ops import operations as P
  23. from tests.ut.python.ops.test_math_ops import VirtualLoss
# Composite gradient op: returns gradients w.r.t. all inputs of the wrapped network.
grad_all = C.GradOperation(get_all=True)
  25. class NetWithLoss(nn.Cell):
  26. def __init__(self, network):
  27. super(NetWithLoss, self).__init__()
  28. self.loss = VirtualLoss()
  29. self.network = network
  30. def construct(self, x, y):
  31. predict = self.network(x, y)
  32. return self.loss(predict)
  33. class GradWrap(nn.Cell):
  34. def __init__(self, network):
  35. super(GradWrap, self).__init__()
  36. self.network = network
  37. def construct(self, x, y):
  38. return grad_all(self.network)(x, y)
  39. class Net(nn.Cell):
  40. def __init__(self, axis=0, strategy1=None, strategy2=None, shape=None, target=""):
  41. super().__init__()
  42. if shape is None:
  43. shape = [64, 64]
  44. self.gatherv2 = P.SparseGatherV2().shard(strategy1).add_prim_attr("primitive_target", target)
  45. self.mul = P.Mul().shard(strategy2)
  46. self.index = Tensor(np.ones(shape), dtype=ms.int32)
  47. self.axis = axis
  48. def construct(self, x, y):
  49. out = self.gatherv2(x, self.index, self.axis)
  50. out = self.mul(out, y)
  51. return out
  52. def test_gatherv2_semi_auto0():
  53. context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")
  54. strategy1 = ((8, 1), (1, 1))
  55. strategy2 = ((4, 2, 1), (4, 2, 1))
  56. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2)))
  57. net.set_auto_parallel()
  58. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  59. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  60. net.set_train()
  61. _cell_graph_executor.compile(net, x, y)
  62. def test_gatherv2_semi_auto1():
  63. """
  64. Feature: distribute operator SparseGatherV2 in auto parallel.
  65. Description: gather net with strategy in semi auto parallel, gather axis is 1.
  66. Expectation: compile done without error.
  67. """
  68. context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")
  69. strategy1 = ((1, 8), (1, 1))
  70. strategy2 = ((4, 2, 1), (4, 2, 1))
  71. net = GradWrap(NetWithLoss(Net(1, strategy1, strategy2)))
  72. net.set_auto_parallel()
  73. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  74. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  75. net.set_train()
  76. _cell_graph_executor.compile(net, x, y)
  77. def test_gatherv2_semi_auto2():
  78. """
  79. Feature: distribute operator SparseGatherV2 in auto parallel.
  80. Description: gather net with strategy in semi auto parallel, gather axis is 1.
  81. Expectation: compile done without error.
  82. """
  83. context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")
  84. strategy1 = ((8, 1), (1, 1))
  85. strategy2 = ((4, 2, 1), (4, 2, 1))
  86. net = GradWrap(NetWithLoss(Net(1, strategy1, strategy2)))
  87. net.set_auto_parallel()
  88. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  89. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  90. net.set_train()
  91. _cell_graph_executor.compile(net, x, y)
  92. def test_gatherv2_semi_auto3():
  93. """
  94. Feature: distribute operator SparseGatherV2 in auto parallel.
  95. Description: gather net with strategy in semi auto parallel, gather axis is 1.
  96. Expectation: compile done without error.
  97. """
  98. context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")
  99. strategy1 = ((2, 4), (1, 1))
  100. strategy2 = ((4, 2, 1), (4, 2, 1))
  101. net = GradWrap(NetWithLoss(Net(1, strategy1, strategy2)))
  102. net.set_auto_parallel()
  103. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  104. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  105. net.set_train()
  106. _cell_graph_executor.compile(net, x, y)
  107. def test_gatherv2_semi_auto4():
  108. """
  109. Feature: distribute operator SparseGatherV2 in auto parallel.
  110. Description: gather net with strategy in semi auto parallel, gather axis is 0.
  111. Expectation: compile done without error.
  112. """
  113. context.set_auto_parallel_context(dataset_strategy="full_batch")
  114. context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")
  115. strategy2 = ((4, 2, 1), (4, 2, 1))
  116. net = GradWrap(NetWithLoss(Net(0, None, strategy2)))
  117. net.set_auto_parallel()
  118. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  119. y = Tensor(np.ones([64, 64, 32]), dtype=ms.float32)
  120. net.set_train()
  121. _cell_graph_executor.compile(net, x, y)
  122. def test_gatherv2_semi_auto5():
  123. """
  124. Feature: distribute operator SparseGatherV2 in auto parallel.
  125. Description: gather net with strategy in semi auto parallel, gather axis is 1.
  126. Expectation: compile done without error.
  127. """
  128. context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")
  129. strategy2 = ((4, 2, 1), (4, 2, 1))
  130. net = GradWrap(NetWithLoss(Net(1, None, strategy2)))
  131. net.set_auto_parallel()
  132. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  133. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  134. net.set_train()
  135. _cell_graph_executor.compile(net, x, y)
  136. def test_gatherv2_auto0():
  137. context.set_auto_parallel_context(dataset_strategy="full_batch")
  138. context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="auto_parallel")
  139. net = GradWrap(NetWithLoss(Net(0)))
  140. net.set_auto_parallel()
  141. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  142. y = Tensor(np.ones([64, 64, 32]), dtype=ms.float32)
  143. net.set_train()
  144. _cell_graph_executor.compile(net, x, y)
  145. def test_gatherv2_auto1():
  146. context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="auto_parallel")
  147. net = GradWrap(NetWithLoss(Net(1)))
  148. net.set_auto_parallel()
  149. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  150. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  151. net.set_train()
  152. _cell_graph_executor.compile(net, x, y)
  153. def test_gatherv2_cpu0():
  154. context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")
  155. strategy1 = ((8, 1), (1, 1))
  156. strategy2 = ((4, 2, 1), (4, 2, 1))
  157. net = NetWithLoss(Net(0, strategy1, strategy2, None, "CPU"))
  158. net.set_auto_parallel()
  159. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  160. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  161. net.set_train()
  162. _cell_graph_executor.compile(net, x, y)
  163. def test_gatherv2_cpu1():
  164. context.set_auto_parallel_context(device_num=16, global_rank=0, parallel_mode="semi_auto_parallel")
  165. strategy1 = ((16, 1), (1, 1))
  166. strategy2 = ((4, 2, 1), (4, 2, 1))
  167. net = NetWithLoss(Net(0, strategy1, strategy2, None, "CPU"))
  168. net.set_auto_parallel()
  169. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  170. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  171. net.set_train()
  172. _cell_graph_executor.compile(net, x, y)
  173. def test_gatherv2_cpu2():
  174. context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")
  175. strategy1 = ((1, 8), (1, 1))
  176. strategy2 = ((4, 2, 1), (4, 2, 1))
  177. net = NetWithLoss(Net(0, strategy1, strategy2, None, "CPU"))
  178. net.set_auto_parallel()
  179. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  180. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  181. net.set_train()
  182. _cell_graph_executor.compile(net, x, y)