You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

test_gather_v2.py 15 kB

5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377
  1. # Copyright 2019 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. import numpy as np
  16. import mindspore as ms
  17. import mindspore.nn as nn
  18. from mindspore import Tensor
  19. from mindspore import context
  20. from mindspore.common.api import _cell_graph_executor
  21. from mindspore.ops import composite as C
  22. from mindspore.ops import operations as P
  23. from tests.ut.python.ops.test_math_ops import VirtualLoss
# Composite grad operation that returns gradients w.r.t. all inputs of the wrapped cell.
grad_all = C.GradOperation(get_all=True)
  25. class NetWithLoss(nn.Cell):
  26. def __init__(self, network):
  27. super(NetWithLoss, self).__init__()
  28. self.loss = VirtualLoss()
  29. self.network = network
  30. def construct(self, x, y):
  31. predict = self.network(x, y)
  32. return self.loss(predict)
  33. class GradWrap(nn.Cell):
  34. def __init__(self, network):
  35. super(GradWrap, self).__init__()
  36. self.network = network
  37. def construct(self, x, y):
  38. return grad_all(self.network)(x, y)
  39. class Net(nn.Cell):
  40. def __init__(self, axis=0, strategy1=None, strategy2=None, shape=None, target="", gather_out_strategy=None):
  41. super().__init__()
  42. if shape is None:
  43. shape = [64, 64]
  44. self.gatherv2 = P.Gather().shard(strategy1, gather_out_strategy).add_prim_attr("primitive_target", target)
  45. self.mul = P.Mul().shard(strategy2)
  46. self.index = Tensor(np.ones(shape), dtype=ms.int32)
  47. self.axis = axis
  48. def construct(self, x, y):
  49. out = self.gatherv2(x, self.index, self.axis)
  50. out = self.mul(out, y)
  51. return out
  52. def compile_graph(net, device_num, parallel_mode, x, y):
  53. context.set_auto_parallel_context(device_num=device_num, global_rank=0, parallel_mode=parallel_mode)
  54. net.set_auto_parallel()
  55. net.set_train()
  56. _cell_graph_executor.compile(net, x, y)
  57. def test_gatherv2_semi_auto0():
  58. """
  59. Feature: distribute operator gather in auto parallel.
  60. Description: gather net with strategy in semi auto parallel, gather axis is 0.
  61. Expectation: compile done without error.
  62. """
  63. strategy1 = ((1, 8), (1, 1))
  64. strategy2 = ((4, 2, 1), (4, 2, 1))
  65. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2)))
  66. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  67. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  68. compile_graph(net, 8, "semi_auto_parallel", x, y)
  69. def test_gatherv2_semi_auto1():
  70. """
  71. Feature: distribute operator gather in auto parallel.
  72. Description: gather net with strategy in semi auto parallel, gather axis is 0.
  73. Expectation: compile done without error.
  74. """
  75. strategy1 = ((8, 1), (1, 1))
  76. strategy2 = ((4, 2, 1), (4, 2, 1))
  77. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2)))
  78. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  79. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  80. compile_graph(net, 8, "semi_auto_parallel", x, y)
  81. def test_gatherv2_semi_auto2():
  82. """
  83. Feature: distribute operator gather in auto parallel.
  84. Description: gather net with strategy in semi auto parallel, gather axis is 0.
  85. Expectation: compile done without error.
  86. """
  87. strategy1 = ((2, 4), (1, 1))
  88. strategy2 = ((4, 2, 1), (4, 2, 1))
  89. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2)))
  90. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  91. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  92. compile_graph(net, 8, "semi_auto_parallel", x, y)
  93. def test_gatherv2_semi_auto3():
  94. """
  95. Feature: distribute operator gather in auto parallel.
  96. Description: gather net with strategy in semi auto parallel, gather axis is 1.
  97. Expectation: compile done without error.
  98. """
  99. strategy1 = ((1, 8), (1, 1))
  100. strategy2 = ((4, 2, 1), (4, 2, 1))
  101. net = GradWrap(NetWithLoss(Net(1, strategy1, strategy2)))
  102. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  103. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  104. compile_graph(net, 8, "semi_auto_parallel", x, y)
  105. def test_gatherv2_semi_auto4():
  106. """
  107. Feature: distribute operator gather in auto parallel.
  108. Description: gather net with strategy in semi auto parallel, gather axis is 1.
  109. Expectation: compile done without error.
  110. """
  111. strategy1 = ((8, 1), (1, 1))
  112. strategy2 = ((4, 2, 1), (4, 2, 1))
  113. net = GradWrap(NetWithLoss(Net(1, strategy1, strategy2)))
  114. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  115. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  116. compile_graph(net, 8, "semi_auto_parallel", x, y)
  117. def test_gatherv2_semi_auto5():
  118. """
  119. Feature: distribute operator gather in auto parallel.
  120. Description: gather net with strategy in semi auto parallel, gather axis is 1.
  121. Expectation: compile done without error.
  122. """
  123. strategy1 = ((2, 4), (1, 1))
  124. strategy2 = ((4, 2, 1), (4, 2, 1))
  125. net = GradWrap(NetWithLoss(Net(1, strategy1, strategy2)))
  126. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  127. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  128. compile_graph(net, 8, "semi_auto_parallel", x, y)
  129. def test_gatherv2_semi_auto6():
  130. """
  131. Feature: distribute operator gather in auto parallel.
  132. Description: gather net with strategy in semi auto parallel, gather axis is 0.
  133. Expectation: compile done without error.
  134. """
  135. strategy2 = ((4, 2, 1), (4, 2, 1))
  136. net = GradWrap(NetWithLoss(Net(0, None, strategy2)))
  137. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  138. y = Tensor(np.ones([64, 64, 32]), dtype=ms.float32)
  139. compile_graph(net, 8, "semi_auto_parallel", x, y)
  140. def test_gatherv2_semi_auto7():
  141. """
  142. Feature: distribute operator gather in auto parallel.
  143. Description: gather net with strategy in semi auto parallel, gather axis is 1.
  144. Expectation: compile done without error.
  145. """
  146. strategy2 = ((4, 2, 1), (4, 2, 1))
  147. net = GradWrap(NetWithLoss(Net(1, None, strategy2)))
  148. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  149. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  150. compile_graph(net, 8, "semi_auto_parallel", x, y)
  151. def test_gatherv2_semi_auto8():
  152. """
  153. Feature: distribute operator gather in auto parallel.
  154. Description: gather net with strategy in semi auto parallel, gather axis is 0.
  155. Expectation: compile done without error.
  156. """
  157. strategy1 = ((8,), (1, 1))
  158. strategy2 = ((4, 2), (4, 2))
  159. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2)))
  160. x = Tensor(np.ones([64]), dtype=ms.float32)
  161. y = Tensor(np.ones([64, 64]), dtype=ms.float32)
  162. compile_graph(net, 8, "semi_auto_parallel", x, y)
  163. def test_gatherv2_forward_all_reduce():
  164. """
  165. Feature: distribute operator gather in auto parallel.
  166. Description: gather net using forward all_reduce in semi auto parallel, gather axis is 0.
  167. Expectation: compile done without error.
  168. """
  169. strategy1 = ((8, 1), (1, 1))
  170. strategy2 = ((2, 4, 1), (2, 4, 1))
  171. context.set_auto_parallel_context(dataset_strategy="full_batch")
  172. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, shape=[2, 64])))
  173. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  174. y = Tensor(np.ones([2, 64, 64]), dtype=ms.float32)
  175. compile_graph(net, 8, "semi_auto_parallel", x, y)
  176. def test_gatherv2_shard_batch_and_axis():
  177. """
  178. Feature: distribute operator gather in auto parallel.
  179. Description: gather net with batch and axis sharding strategy in semi auto parallel, gather axis is 0.
  180. Expectation: compile done without error.
  181. """
  182. strategy1 = ((4, 1), (2, 1))
  183. strategy2 = ((2, 4, 1), (2, 4, 1))
  184. context.set_auto_parallel_context(dataset_strategy="full_batch")
  185. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, shape=[2, 64])))
  186. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  187. y = Tensor(np.ones([2, 64, 64]), dtype=ms.float32)
  188. compile_graph(net, 8, "semi_auto_parallel", x, y)
  189. def test_gatherv2_split_axis_0_repeat_calc():
  190. """
  191. Feature: distribute operator gather in auto parallel.
  192. Description: gather net with repeat calculate strategy in semi auto parallel, gather axis is 0.
  193. Expectation: compile done without error.
  194. """
  195. strategy1 = ((4, 1), (1, 1))
  196. strategy2 = ((2, 4, 1), (2, 4, 1))
  197. context.set_auto_parallel_context(dataset_strategy="full_batch")
  198. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, shape=[2, 64])))
  199. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  200. y = Tensor(np.ones([2, 64, 64]), dtype=ms.float32)
  201. compile_graph(net, 8, "semi_auto_parallel", x, y)
  202. def test_gatherv2_auto0():
  203. """
  204. Feature: distribute operator gather in auto parallel.
  205. Description: gather net without strategy in auto parallel, gather axis is 0.
  206. Expectation: compile done without error.
  207. """
  208. net = GradWrap(NetWithLoss(Net(0)))
  209. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  210. y = Tensor(np.ones([64, 64, 32]), dtype=ms.float32)
  211. compile_graph(net, 8, "auto_parallel", x, y)
  212. def test_gatherv2_auto1():
  213. """
  214. Feature: distribute operator gather in auto parallel.
  215. Description: gather net without strategy in auto parallel, gather axis is 1.
  216. Expectation: compile done without error.
  217. """
  218. net = GradWrap(NetWithLoss(Net(1)))
  219. x = Tensor(np.ones([64, 32]), dtype=ms.float32)
  220. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  221. compile_graph(net, 8, "auto_parallel", x, y)
  222. def test_gatherv2_out_strategy_allreduce():
  223. """
  224. Feature: distribute operator gather in semi auto parallel.
  225. Description: axis is 0, split axis with device num and out strategy use allreduce.
  226. Expectation: compile done without error.
  227. """
  228. strategy1 = ((8, 1), (1, 1))
  229. out_strategy = ((1, 1, 1),)
  230. strategy2 = ((2, 4, 1), (2, 4, 1))
  231. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, gather_out_strategy=out_strategy)))
  232. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  233. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  234. compile_graph(net, 8, "semi_auto_parallel", x, y)
  235. def test_gatherv2_out_strategy_allreduce_repeat_calc():
  236. """
  237. Feature: distribute operator gather in semi auto parallel.
  238. Description: axis is 0, split axis, split num small than device num and out strategy use allreduce.
  239. Expectation: compile done without error.
  240. """
  241. strategy1 = ((4, 1), (1, 1))
  242. out_strategy = ((1, 1, 1),)
  243. strategy2 = ((2, 4, 1), (2, 4, 1))
  244. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, gather_out_strategy=out_strategy)))
  245. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  246. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  247. compile_graph(net, 8, "semi_auto_parallel", x, y)
  248. def test_gatherv2_out_strategy_reducescatter():
  249. """
  250. Feature: distribute operator gather in semi auto parallel.
  251. Description: axis is 0, split axis with device num and out strategy use reducescatter.
  252. Expectation: compile done without error.
  253. """
  254. strategy1 = ((8, 1), (1, 1))
  255. out_strategy = ((8, 1, 1),)
  256. strategy2 = ((2, 4, 1), (2, 4, 1))
  257. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, gather_out_strategy=out_strategy)))
  258. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  259. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  260. compile_graph(net, 8, "semi_auto_parallel", x, y)
  261. def test_gatherv2_out_strategy_reducescatter_repeat_calc():
  262. """
  263. Feature: distribute operator gather in semi auto parallel.
  264. Description: axis is 0, split axis, split num small than device num and out strategy use reducescatter.
  265. Expectation: compile done without error.
  266. """
  267. strategy1 = ((4, 1), (1, 1))
  268. out_strategy = ((4, 1, 1),)
  269. strategy2 = ((2, 4, 1), (2, 4, 1))
  270. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, gather_out_strategy=out_strategy)))
  271. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  272. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  273. compile_graph(net, 8, "semi_auto_parallel", x, y)
  274. def test_gatherv2_shard_batch_and_axis_out_strategy_allreduce():
  275. """
  276. Feature: distribute operator gather in semi auto parallel.
  277. Description: axis is 0, split axis and batch, out strategy use allreduce.
  278. Expectation: compile done without error.
  279. """
  280. strategy1 = ((4, 1), (2, 1))
  281. out_strategy = ((2, 1, 1),)
  282. strategy2 = ((2, 4, 1), (2, 4, 1))
  283. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, gather_out_strategy=out_strategy)))
  284. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  285. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  286. compile_graph(net, 8, "semi_auto_parallel", x, y)
  287. def test_gatherv2_shard_batch_and_axis_out_strategy_reducescatter():
  288. """
  289. Feature: distribute operator gather in semi auto parallel.
  290. Description: axis is 0, split axis and batch, out strategy use reducescatter.
  291. Expectation: compile done without error.
  292. """
  293. strategy1 = ((4, 1), (2, 1))
  294. out_strategy = ((8, 1, 1),)
  295. strategy2 = ((2, 4, 1), (2, 4, 1))
  296. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, gather_out_strategy=out_strategy)))
  297. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  298. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  299. compile_graph(net, 8, "semi_auto_parallel", x, y)
  300. def test_gatherv2_target_cpu_reducescatter():
  301. """
  302. Feature: distribute operator gather in semi auto parallel.
  303. Description: axis is 0, split axis and batch, out strategy use reducescatter.
  304. Expectation: compile done without error.
  305. """
  306. strategy1 = ((8, 1), (1, 1))
  307. out_strategy = ((8, 1, 1),)
  308. strategy2 = ((2, 4, 1), (2, 4, 1))
  309. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, target="CPU", gather_out_strategy=out_strategy)))
  310. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  311. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  312. compile_graph(net, 8, "semi_auto_parallel", x, y)
  313. def test_gatherv2_target_cpu_allreduce():
  314. """
  315. Feature: distribute operator gather in semi auto parallel.
  316. Description: axis is 0, split axis and batch, out strategy use allreduce.
  317. Expectation: compile done without error.
  318. """
  319. strategy1 = ((8, 1), (1, 1))
  320. out_strategy = ((1, 1, 1),)
  321. strategy2 = ((2, 4, 1), (2, 4, 1))
  322. net = GradWrap(NetWithLoss(Net(0, strategy1, strategy2, target="CPU", gather_out_strategy=out_strategy)))
  323. x = Tensor(np.ones([64, 64]), dtype=ms.float32)
  324. y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
  325. compile_graph(net, 8, "semi_auto_parallel", x, y)