

# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import sys

import numpy as np
import pytest

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
import mindspore.common.dtype as mstype
from mindspore.ops import operations as P
from mindspore.ops.operations import _grad_ops as G
from mindspore.ops.composite import GradOperation

context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
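
# Each case below feeds a runtime value (a Tensor of multiples/shape/begin/size,
# or the output of Unique) into a shape-determining argument, so the compiled
# graph has to handle dynamic shapes end to end.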


class TileNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.tile = P.Tile()

    def construct(self, x, multiples):
        out = self.tile(x, multiples)
        return out
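

# Passing `multiples` as a Tensor rather than a constant tuple means the
# output shape of Tile is only known at runtime; the same compiled net is
# run with two different multiples to cover both resulting shapes.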
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_tile_multiple_tensor_cpu():
    """
    /// Feature: Tile op dynamic shape
    /// Description: Tile forward with dynamic shape
    /// Expectation: Equal to expected value
    """
    if sys.platform != 'linux':
        return
    multiples_1 = Tensor(np.array([2, 1]), mstype.int64)
    multiples_2 = Tensor(np.array([4, 1]), mstype.int64)
    x = Tensor(np.array([[1, 2, 3, 4]]), mstype.float32)
    tile_net = TileNet()
    expect_1 = np.array([[1., 2., 3., 4.],
                         [1., 2., 3., 4.]])
    expect_2 = np.array([[1., 2., 3., 4.],
                         [1., 2., 3., 4.],
                         [1., 2., 3., 4.],
                         [1., 2., 3., 4.]])
    expect = [expect_1, expect_2]
    for i, multiples in enumerate([multiples_1, multiples_2]):
        output = tile_net(x, multiples)
        assert (output.asnumpy() == expect[i]).all()
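

# Unique's output length depends on the values in `grad`, so routing the
# sense gradient through Unique keeps dy's shape dynamic on the backward
# path as well.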
class GradTile(nn.Cell):
    def __init__(self, network):
        super().__init__()
        self.grad = GradOperation(sens_param=True)
        self.network = network
        self.unique = P.Unique()
        self.reshape = P.Reshape()

    def construct(self, input_x, multiples, grad):
        dy = self.unique(grad)[0]
        dy = self.reshape(dy, (2, 4))
        return self.grad(self.network)(input_x, multiples, dy)
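

# dout = [1..8] passes through Unique unchanged and is reshaped to (2, 4);
# the gradient of Tile sums the tiled copies row-wise:
# [1 + 5, 2 + 6, 3 + 7, 4 + 8] = [6, 8, 10, 12].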
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_tile_multiple_tensor_grad_cpu():
    """
    /// Feature: Tile op dynamic shape
    /// Description: Tile backward with dynamic shape
    /// Expectation: Equal to expected value
    """
    if sys.platform != 'linux':
        return
    multiples = Tensor(np.array([2, 1]), mstype.int64)
    x0 = Tensor(np.array([[1, 2, 3, 4]]), mstype.float32)
    tile_net = GradTile(TileNet())
    dout = Tensor(np.arange(1, 9), mstype.float32)
    output = tile_net(x0, multiples, dout)
    expect = np.array([[6., 8., 10., 12.]])
    assert (output.asnumpy() == expect).all()
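

# ConcatOffset reports, for each of its inputs, the offset at which that
# input starts inside the concatenated result; the arguments (3, 0) here
# are presumably the number of inputs and the concat axis.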
class ConcatOffsetNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.unique = P.Unique()
        self.concat_offset = G.ConcatOffset(3, 0)
        self.reshape = P.Reshape()

    def construct(self, x, y, z):
        x = self.reshape(self.unique(x)[0], (-1, 1, 2, 1))
        y = self.reshape(self.unique(y)[0], (-1, 1, 2, 1))
        z = self.reshape(self.unique(z)[0], (-1, 1, 2, 1))
        out = self.concat_offset((x, y, z))
        return out
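

# Each input holds six distinct values, so Unique + Reshape yields shape
# (3, 1, 2, 1) and the offsets along axis 0 are 0, 3 and 6.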
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_concat_offset_dynamic_cpu():
    """
    /// Feature: ConcatOffset op dynamic shape
    /// Description: ConcatOffset forward with dynamic shape
    /// Expectation: Equal to expected value
    """
    if sys.platform != 'linux':
        return
    x = Tensor(np.array([1, 2, 3, 4, 5, 6]), mstype.float32)
    x2 = Tensor(np.array([1, 2, 3, 4, 5, 6]), mstype.float32)
    x3 = Tensor(np.array([1, 2, 3, 4, 5, 6]), mstype.float32)
    net = ConcatOffsetNet()
    out = net(x, x2, x3)
    expect = np.array([[0, 0, 0, 0],
                       [3, 0, 0, 0],
                       [6, 0, 0, 0]])
    if isinstance(out, tuple):
        assert (np.array(out) == expect).all()
    else:
        assert (out.asnumpy() == expect).all()
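

# Concat's backward splits the incoming gradient into one slice per input,
# so the gradient w.r.t. the first input is the first third of dout
# ([1..6]) reshaped back to that input's original shape.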
class ConcatNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.unique = P.Unique()
        self.concat = P.Concat()
        self.reshape = P.Reshape()

    def construct(self, x, y, z, shape_tensor):
        x = self.reshape(x, shape_tensor)
        y = self.reshape(y, shape_tensor)
        z = self.reshape(z, shape_tensor)
        out = self.concat((x, y, z))
        return out


class GradConcat(nn.Cell):
    def __init__(self, network):
        super().__init__()
        self.grad = GradOperation(sens_param=True)
        self.network = network
        self.unique = P.Unique()
        self.reshape = P.Reshape()

    def construct(self, x, y, z, shape, grad):
        dy = self.reshape(self.unique(grad)[0], (-1, 1, 2, 1))
        return self.grad(self.network)(x, y, z, shape, dy)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_concat_dynamic_grad_cpu():
    """
    /// Feature: Concat op dynamic shape
    /// Description: Concat backward with dynamic shape
    /// Expectation: Equal to expected value
    """
    if sys.platform != 'linux':
        return
    x = Tensor(np.array([1, 2, 3, 4, 5, 6]), mstype.float32)
    x2 = Tensor(np.array([1, 2, 3, 4, 5, 6]), mstype.float32)
    x3 = Tensor(np.array([1, 2, 3, 4, 5, 6]), mstype.float32)
    shape = Tensor(np.array([3, 1, 2, 1]), mstype.int64)
    dout = Tensor(np.arange(1, 19), mstype.float32)
    net = GradConcat(ConcatNet())
    output = net(x, x2, x3, shape, dout)
    expect = np.array([1., 2., 3., 4., 5., 6.])
    assert (output.asnumpy() == expect).all()
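

# Slice with `begin` and `size` given as Tensors: the sliced extent, and
# hence the output shape, is only known at runtime.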
class SliceNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.slice = P.Slice()

    def construct(self, x, begin, size):
        return self.slice(x, begin, size)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_slice_begin_size_tensor_cpu():
    """
    /// Feature: Slice op dynamic shape
    /// Description: Slice forward with dynamic shape
    /// Expectation: Equal to expected value
    """
    if sys.platform != 'linux':
        return
    x = Tensor(np.array([[[1, -1, 1], [2, -2, 2]],
                         [[3, -3, 3], [4, -4, 4]],
                         [[5, -5, 5], [6, -6, 6]]]), mstype.float32)
    begin = Tensor(np.array([0, 1, 0]), mstype.int64)
    size = Tensor(np.array([2, 1, 2]), mstype.int64)
    slice_net = SliceNet()
    output = slice_net(x, begin, size)
    expect = np.array([[[2., -2.]],
                       [[4., -4.]]])
    assert (output.asnumpy() == expect).all()
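

# The backward of Slice scatters dy into a zero tensor of the input's
# shape at position `begin`; elements outside the sliced window get zero
# gradient.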
class GradSlice(nn.Cell):
    def __init__(self, network):
        super().__init__()
        self.grad = GradOperation(sens_param=True)
        self.network = network
        self.unique = P.Unique()
        self.reshape = P.Reshape()

    def construct(self, input_x, begin, size, grad):
        dy = self.unique(grad)[0]
        dy = self.reshape(dy, size)
        return self.grad(self.network)(input_x, begin, size, dy)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_slice_begin_size_tensor_grad():
    """
    /// Feature: Slice op dynamic shape
    /// Description: Slice backward with dynamic shape
    /// Expectation: Equal to expected value
    """
    if sys.platform != 'linux':
        return
    dy = Tensor(np.array([1, 2, 3, 4]), mstype.float32)
    x = Tensor(np.array([[[1, -1, 1], [2, -2, 2]],
                         [[3, -3, 3], [4, -4, 4]],
                         [[5, -5, 5], [6, -6, 6]]]), mstype.float32)
    begin = Tensor(np.array([0, 1, 0]), mstype.int64)
    size = Tensor(np.array([2, 1, 2]), mstype.int64)
    net = GradSlice(SliceNet())
    output = net(x, begin, size, dy)
    expect = np.array([[[0., 0., 0.],
                        [1., 2., 0.]],
                       [[0., 0., 0.],
                        [3., 4., 0.]],
                       [[0., 0., 0.],
                        [0., 0., 0.]]])
    assert (output.asnumpy() == expect).all()
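

# Reshape with a Tensor-valued shape makes ReduceMean's input dynamic; the
# mean is taken over axis 0 with keep_dims=True, so [[10, 10], [2, 2]]
# reduces to [[6, 6]].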
class ReduceMeanNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.reduce_mean = P.ReduceMean(keep_dims=True)
        self.reshape = P.Reshape()
        self.tile = P.Tile()

    def construct(self, x, shape):
        y = self.reshape(x, shape)
        return self.reduce_mean(y, 0)


class GradReduceMean(nn.Cell):
    def __init__(self, network):
        super().__init__()
        self.grad = GradOperation(get_all=True, sens_param=True)
        self.network = network
        self.reshape = P.Reshape()
        self.unique = P.Unique()

    def construct(self, input_x, shape, grad):
        grad = self.reshape(self.unique(grad)[0], (1, 2))
        return self.grad(self.network)(input_x, shape, grad)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_reducemean_dynamic_cpu():
    """
    /// Feature: ReduceMean op dynamic shape
    /// Description: ReduceMean forward with dynamic shape
    /// Expectation: Equal to expected value
    """
    if sys.platform != 'linux':
        return
    x = Tensor(np.array([10, 10, 2, 2]), mstype.float32)
    x2 = Tensor(np.array([2, 2]), mstype.int64)
    reduce_mean = ReduceMeanNet()
    out = reduce_mean(x, x2)
    expect = np.array([[6., 6.]])
    assert (out.asnumpy() == expect).all()
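

# The mean over axis 0 averages two rows, so each row receives dout / 2:
# dout = [1, 3] gives [[0.5, 1.5], [0.5, 1.5]], which flattens back to
# [0.5, 1.5, 0.5, 1.5] for the original 1-D x.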
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_reducemean_dynamic_grad_cpu():
    """
    /// Feature: ReduceMean op dynamic shape
    /// Description: ReduceMean backward with dynamic shape
    /// Expectation: Equal to expected value
    """
    if sys.platform != 'linux':
        return
    x = Tensor(np.array([10, 10, 2, 2]), mstype.float32)
    x2 = Tensor(np.array([2, 2]), mstype.int64)
    dout = Tensor(np.array([1, 3]), mstype.float32)
    reduce_mean = GradReduceMean(ReduceMeanNet())
    out = reduce_mean(x, x2, dout)
    expect = np.array([[0.5, 1.5, 0.5, 1.5]])
    assert (out[0].asnumpy() == expect).all()