You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

test_nn_ops_check.py 22 kB

5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. """ test ops """
  16. import numpy as np
  17. import mindspore.nn as nn
  18. from mindspore import Tensor
  19. from mindspore.ops import functional as F
  20. from mindspore.ops import operations as P
  21. from ....mindspore_test_framework.mindspore_test import mindspore_test
  22. from ....mindspore_test_framework.pipeline.forward.compile_forward \
  23. import pipeline_for_compile_forward_ge_graph_for_case_by_case_config_exception
  24. class Conv2DBackpropInputNet(nn.Cell):
  25. def __init__(self, net, x_shape):
  26. super(Conv2DBackpropInputNet, self).__init__()
  27. self.net = net
  28. self.x_shape = x_shape
  29. def construct(self, dout, w):
  30. return self.net(dout, w, self.x_shape)
  31. class TopKNet(nn.Cell):
  32. def __init__(self, net, k):
  33. super(TopKNet, self).__init__()
  34. self.net = net
  35. self.k = k
  36. def construct(self, x):
  37. return self.net(x, self.k)
# Exception test cases for nn operators.  Each entry is (case_name, config):
# config['block'] pairs an op instance with the exception type and the
# keywords that must appear in its error message; 'desc_inputs' are the
# (deliberately invalid) forward inputs; backward is skipped everywhere.
raise_set = [
    # input is scalar
    ('Flatten0', {
        'block': (P.Flatten(), {'exception': TypeError, 'error_keywords': ['Flatten']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # dim of input is zero
    ('Flatten1', {
        'block': (P.Flatten(), {'exception': ValueError, 'error_keywords': ['Flatten']}),
        'desc_inputs': [F.scalar_to_tensor(5.0)],
        'skip': ['backward']}),
    # input is scalar
    ('Softmax0', {
        'block': (P.Softmax(), {'exception': TypeError, 'error_keywords': ['Softmax']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # axis is empty tuple
    ('Softmax1', {
        'block': (P.Softmax(axis=()), {'exception': ValueError, 'error_keywords': ['Softmax']}),
        'desc_inputs': [Tensor(np.ones([3, 4]).astype(np.float32))],
        'skip': ['backward']}),
    # axis value is not in range
    ('Softmax2', {
        'block': (P.Softmax(axis=2), {'exception': ValueError, 'error_keywords': ['Softmax']}),
        'desc_inputs': [Tensor(np.ones([3, 4]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('LogSoftmax0', {
        'block': (P.LogSoftmax(), {'exception': TypeError, 'error_keywords': ['LogSoftmax']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # axis value is not in range
    ('LogSoftmax1', {
        'block': (P.LogSoftmax(axis=2), {'exception': ValueError, 'error_keywords': ['LogSoftmax']}),
        'desc_inputs': [Tensor(np.ones([3, 4]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('ReLU0', {
        'block': (P.ReLU(), {'exception': TypeError, 'error_keywords': ['ReLU']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # input is Tensor(Bool)
    ('ReLU1', {
        'block': (P.ReLU(), {'exception': TypeError, 'error_keywords': ['ReLU']}),
        'desc_inputs': [Tensor(np.ones([3, 4]).astype(np.bool_))],
        'skip': ['backward']}),
    # input is scalar
    ('ReLU60', {
        'block': (P.ReLU6(), {'exception': TypeError, 'error_keywords': ['ReLU6']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # input is Tensor(int32)
    ('ReLU61', {
        'block': (P.ReLU6(), {'exception': TypeError, 'error_keywords': ['ReLU6']}),
        'desc_inputs': [Tensor(np.ones([3, 4]).astype(np.int32))],
        'skip': ['backward']}),
    # input is scalar
    ('Elu0', {
        'block': (P.Elu(), {'exception': TypeError, 'error_keywords': ['Elu']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # input is Tensor(int32)
    ('Elu1', {
        'block': (P.Elu(), {'exception': TypeError, 'error_keywords': ['Elu']}),
        'desc_inputs': [Tensor(np.ones([3, 4]).astype(np.int32))],
        'skip': ['backward']}),
    # input is scalar
    ('Sigmoid0', {
        'block': (P.Sigmoid(), {'exception': TypeError, 'error_keywords': ['Sigmoid']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # input is Tensor(int32)
    ('Sigmoid1', {
        'block': (P.Sigmoid(), {'exception': TypeError, 'error_keywords': ['Sigmoid']}),
        'desc_inputs': [Tensor(np.ones([3, 4]).astype(np.int32))],
        'skip': ['backward']}),
    # input is scalar
    ('Tanh0', {
        'block': (P.Tanh(), {'exception': TypeError, 'error_keywords': ['Tanh']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # input is scalar
    ('BatchNorm0', {
        'block': (P.BatchNorm(is_training=False), {'exception': TypeError, 'error_keywords': ['BatchNorm']}),
        'desc_inputs': [5.0, 5.0, 5.0, 5.0, 5.0],
        'skip': ['backward']}),
    # is_training=False and mean=None
    ('BatchNorm1', {
        'block': (P.BatchNorm(is_training=False), {'exception': TypeError, 'error_keywords': ['BatchNorm']}),
        'desc_inputs': [Tensor(np.ones([5, 3]).astype(np.float32)), Tensor(np.ones([5, 3]).astype(np.float32)),
                        Tensor(np.ones([5, 3]).astype(np.float32)), None, None],
        'skip': ['backward']}),
    # mean dtype (float16) mismatches the other float32 inputs
    ('BatchNorm2', {
        'block': (P.BatchNorm(is_training=True), {'exception': TypeError, 'error_keywords': ['BatchNorm']}),
        'desc_inputs': [Tensor(np.ones([5, 3]).astype(np.float32)), Tensor(np.ones([3]).astype(np.float32)),
                        Tensor(np.ones([3]).astype(np.float32)), Tensor(np.ones([3]).astype(np.float16)),
                        Tensor(np.ones([3]).astype(np.float32))],
        'skip': ['backward']}),
    # scale and bias rank > 1
    ('BatchNorm3', {
        'block': (P.BatchNorm(is_training=True), {'exception': ValueError, 'error_keywords': ['BatchNorm']}),
        'desc_inputs': [Tensor(np.ones([5, 3]).astype(np.float32)), Tensor(np.ones([5, 3]).astype(np.float32)),
                        Tensor(np.ones([5, 3]).astype(np.float32)), Tensor(np.ones([3]).astype(np.float32)),
                        Tensor(np.ones([3]).astype(np.float32))],
        'skip': ['backward']}),
    # scale and bias shape not match
    ('BatchNorm4', {
        'block': (P.BatchNorm(is_training=True), {'exception': ValueError, 'error_keywords': ['BatchNorm']}),
        'desc_inputs': [Tensor(np.ones([5, 3]).astype(np.float32)), Tensor(np.ones([3]).astype(np.float32)),
                        Tensor(np.ones([7]).astype(np.float32)), Tensor(np.ones([3]).astype(np.float32)),
                        Tensor(np.ones([3]).astype(np.float32))],
        'skip': ['backward']}),
    # is_training=False, mean and variance shape not match
    ('BatchNorm5', {
        'block': (P.BatchNorm(is_training=False), {'exception': ValueError, 'error_keywords': ['BatchNorm']}),
        'desc_inputs': [Tensor(np.ones([5, 3]).astype(np.float32)), Tensor(np.ones([3]).astype(np.float32)),
                        Tensor(np.ones([3]).astype(np.float32)), Tensor(np.ones([3]).astype(np.float32)),
                        Tensor(np.ones([5]).astype(np.float32))],
        'skip': ['backward']}),
    # is_training=False, mean and scale shape not match
    ('BatchNorm6', {
        'block': (P.BatchNorm(is_training=False), {'exception': ValueError, 'error_keywords': ['BatchNorm']}),
        'desc_inputs': [Tensor(np.ones([5, 3]).astype(np.float32)), Tensor(np.ones([3]).astype(np.float32)),
                        Tensor(np.ones([3]).astype(np.float32)), Tensor(np.ones([5]).astype(np.float32)),
                        Tensor(np.ones([5]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('Conv2D0', {
        'block': (P.Conv2D(2, (5, 5)), {'exception': TypeError, 'error_keywords': ['Conv2D']}),
        'desc_inputs': [5.0, 5.0],
        'skip': ['backward']}),
    # input is Tensor(bool)
    ('Conv2D1', {
        'block': (P.Conv2D(2, (5, 5)), {'exception': TypeError, 'error_keywords': ['Conv2D']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.bool_)), Tensor(np.ones([5]).astype(np.bool_))],
        'skip': ['backward']}),
    # input x and w type mismatch
    ('Conv2D2', {
        'block': (P.Conv2D(2, (5, 5)), {'exception': TypeError, 'error_keywords': ['Conv2D']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.float32)), Tensor(np.ones([5]).astype(np.float16))],
        'skip': ['backward']}),
    # rank of x is not 4
    ('Conv2D3', {
        'block': (P.Conv2D(2, (5, 5)), {'exception': ValueError, 'error_keywords': ['Conv2D']}),
        'desc_inputs': [Tensor(np.ones([1, 1]).astype(np.float32)), Tensor(np.ones([1, 1, 9, 9]).astype(np.float32))],
        'skip': ['backward']}),
    # rank of w is not 4
    ('Conv2D4', {
        'block': (P.Conv2D(2, (5, 5)), {'exception': ValueError, 'error_keywords': ['Conv2D']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 9, 9]).astype(np.float32)),
                        Tensor(np.ones([1, 1, 9]).astype(np.float32))],
        'skip': ['backward']}),
    # x_shape[1] / group != w_shape[1]
    ('Conv2D5', {
        'block': (P.Conv2D(2, (5, 5)), {'exception': ValueError, 'error_keywords': ['Conv2D']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 9, 9]).astype(np.float32)),
                        Tensor(np.ones([1, 2, 9, 9]).astype(np.float32))],
        'skip': ['backward']}),
    # out_channel != w_shape[0]
    ('Conv2D6', {
        'block': (P.Conv2D(2, (5, 5)), {'exception': ValueError, 'error_keywords': ['Conv2D']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 9, 9]).astype(np.float32)),
                        Tensor(np.ones([1, 1, 9, 9]).astype(np.float32))],
        'skip': ['backward']}),
    # kernel_size != w_shape[2:4]
    ('Conv2D7', {
        'block': (P.Conv2D(2, (5, 5)), {'exception': ValueError, 'error_keywords': ['Conv2D']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 9, 9]).astype(np.float32)),
                        Tensor(np.ones([2, 1, 5, 6]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('DepthwiseConv2dNative0', {
        'block': (P.DepthwiseConv2dNative(2, (5, 5)),
                  {'exception': TypeError, 'error_keywords': ['DepthwiseConv2dNative']}),
        'desc_inputs': [5.0, 5.0],
        'skip': ['backward']}),
    # input is Tensor(bool)
    ('DepthwiseConv2dNative1', {
        'block': (P.DepthwiseConv2dNative(2, (5, 5)),
                  {'exception': TypeError, 'error_keywords': ['DepthwiseConv2dNative']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.bool_)), Tensor(np.ones([5]).astype(np.bool_))],
        'skip': ['backward']}),
    # input x and w type mismatch
    ('DepthwiseConv2dNative2', {
        'block': (P.DepthwiseConv2dNative(2, (5, 5)),
                  {'exception': TypeError, 'error_keywords': ['DepthwiseConv2dNative']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.float32)), Tensor(np.ones([5]).astype(np.float16))],
        'skip': ['backward']}),
    # rank of x is not 4
    ('DepthwiseConv2dNative3', {
        'block': (P.DepthwiseConv2dNative(2, (5, 5)),
                  {'exception': ValueError, 'error_keywords': ['DepthwiseConv2dNative']}),
        'desc_inputs': [Tensor(np.ones([1, 1]).astype(np.float32)), Tensor(np.ones([1, 1, 9, 9]).astype(np.float32))],
        'skip': ['backward']}),
    # rank of w is not 4
    ('DepthwiseConv2dNative4', {
        'block': (P.DepthwiseConv2dNative(2, (5, 5)),
                  {'exception': ValueError, 'error_keywords': ['DepthwiseConv2dNative']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 9, 9]).astype(np.float32)),
                        Tensor(np.ones([1, 1, 9]).astype(np.float32))],
        'skip': ['backward']}),
    # x_shape[1] != w_shape[1]
    ('DepthwiseConv2dNative5', {
        'block': (P.DepthwiseConv2dNative(2, (5, 5)),
                  {'exception': ValueError, 'error_keywords': ['DepthwiseConv2dNative']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 9, 9]).astype(np.float32)),
                        Tensor(np.ones([1, 2, 9, 9]).astype(np.float32))],
        'skip': ['backward']}),
    # kernel_size != w_shape[2:4]
    ('DepthwiseConv2dNative6', {
        'block': (P.DepthwiseConv2dNative(2, (5, 5)),
                  {'exception': ValueError, 'error_keywords': ['DepthwiseConv2dNative']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 9, 9]).astype(np.float32)),
                        Tensor(np.ones([2, 1, 5, 6]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('MaxPoolWithArgmax0', {
        'block': (P.MaxPoolWithArgmax(), {'exception': TypeError, 'error_keywords': ['MaxPoolWithArgmax']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # input is Tensor(bool)
    ('MaxPoolWithArgmax1', {
        'block': (P.MaxPoolWithArgmax(), {'exception': TypeError, 'error_keywords': ['MaxPoolWithArgmax']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.bool_))],
        'skip': ['backward']}),
    # rank of x is not 4
    ('MaxPoolWithArgmax2', {
        'block': (P.MaxPoolWithArgmax(), {'exception': ValueError, 'error_keywords': ['MaxPoolWithArgmax']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 32]).astype(np.float32))],
        'skip': ['backward']}),
    # kernel size is invalid(very large)
    ('MaxPoolWithArgmax3', {
        'block': (P.MaxPoolWithArgmax(kernel_size=50),
                  {'exception': ValueError, 'error_keywords': ['MaxPoolWithArgmax']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 32, 32]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('MaxPool0', {
        'block': (P.MaxPool(), {'exception': TypeError, 'error_keywords': ['MaxPool']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # rank of x is not 4
    ('MaxPool1', {
        'block': (P.MaxPool(), {'exception': ValueError, 'error_keywords': ['MaxPool']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 32]).astype(np.float32))],
        'skip': ['backward']}),
    # kernel size (50) is larger than the 32x32 input plane
    ('MaxPool2', {
        'block': (P.MaxPool(kernel_size=50, strides=1), {'exception': ValueError, 'error_keywords': ['MaxPool']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 32, 32]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('AvgPool0', {
        'block': (P.AvgPool(), {'exception': TypeError, 'error_keywords': ['AvgPool']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # rank of x is not 4
    ('AvgPool1', {
        'block': (P.AvgPool(), {'exception': ValueError, 'error_keywords': ['AvgPool']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 32]).astype(np.float32))],
        'skip': ['backward']}),
    # kernel size (50) is larger than the 32x32 input plane
    ('AvgPool2', {
        'block': (P.AvgPool(kernel_size=50, strides=1), {'exception': ValueError, 'error_keywords': ['AvgPool']}),
        'desc_inputs': [Tensor(np.ones([1, 1, 32, 32]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('Conv2DBackpropInput0', {
        'block': (Conv2DBackpropInputNet(P.Conv2DBackpropInput(2, (5, 5)), (2, 3)),
                  {'exception': TypeError, 'error_keywords': ['Conv2DBackpropInput']}),
        'desc_inputs': [5.0, 5.0],
        'skip': ['backward']}),
    # input is Tensor(bool)
    ('Conv2DBackpropInput1', {
        'block': (Conv2DBackpropInputNet(P.Conv2DBackpropInput(2, (5, 5)), (2, 3)),
                  {'exception': TypeError, 'error_keywords': ['Conv2DBackpropInput']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.bool_)), Tensor(np.ones([5]).astype(np.bool_))],
        'skip': ['backward']}),
    # types of doutput and w mismatch
    ('Conv2DBackpropInput2', {
        'block': (Conv2DBackpropInputNet(P.Conv2DBackpropInput(2, (5, 5)), (2, 3)),
                  {'exception': TypeError, 'error_keywords': ['Conv2DBackpropInput']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.int32)), Tensor(np.ones([5]).astype(np.float32))],
        'skip': ['backward']}),
    # x_size is not a tuple
    ('Conv2DBackpropInput3', {
        'block': (Conv2DBackpropInputNet(P.Conv2DBackpropInput(2, (5, 5)), 2),
                  {'exception': TypeError, 'error_keywords': ['Conv2DBackpropInput']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.int32)), Tensor(np.ones([5]).astype(np.float32))],
        'skip': ['backward']}),
    # x_size elements are not all int, i.e. not tuple(int,...)
    ('Conv2DBackpropInput4', {
        'block': (Conv2DBackpropInputNet(P.Conv2DBackpropInput(2, (5, 5)), (2, 3.0)),
                  {'exception': TypeError, 'error_keywords': ['Conv2DBackpropInput']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.int32)), Tensor(np.ones([5]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('BiasAdd0', {
        'block': (P.BiasAdd(), {'exception': TypeError, 'error_keywords': ['BiasAdd']}),
        'desc_inputs': [5.0, 5.0],
        'skip': ['backward']}),
    # input is Tensor(bool)
    ('BiasAdd1', {
        'block': (P.BiasAdd(), {'exception': TypeError, 'error_keywords': ['BiasAdd']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.bool_)), Tensor(np.ones([5]).astype(np.bool_))],
        'skip': ['backward']}),
    # types of x and bias mismatch
    ('BiasAdd2', {
        'block': (P.BiasAdd(), {'exception': TypeError, 'error_keywords': ['BiasAdd']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.int32)), Tensor(np.ones([5]).astype(np.float32))],
        'skip': ['backward']}),
    # rank of x less than 2
    ('BiasAdd3', {
        'block': (P.BiasAdd(), {'exception': ValueError, 'error_keywords': ['BiasAdd']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.float32)), Tensor(np.ones([5]).astype(np.float32))],
        'skip': ['backward']}),
    # rank of bias is not equal to 1
    ('BiasAdd4', {
        'block': (P.BiasAdd(), {'exception': ValueError, 'error_keywords': ['BiasAdd']}),
        'desc_inputs': [Tensor(np.ones([5, 3]).astype(np.float32)), Tensor(np.ones([5, 3]).astype(np.float32))],
        'skip': ['backward']}),
    # b_shape[0] != x_shape[1]
    ('BiasAdd5', {
        'block': (P.BiasAdd(), {'exception': ValueError, 'error_keywords': ['BiasAdd']}),
        'desc_inputs': [Tensor(np.ones([5, 3]).astype(np.float32)), Tensor(np.ones([5]).astype(np.float32))],
        'skip': ['backward']}),
    # input x is scalar
    ('TopK0', {
        'block': (TopKNet(P.TopK(), 5), {'exception': TypeError, 'error_keywords': ['TopK']}),
        'desc_inputs': [5.0],
        'skip': ['backward']}),
    # input x is Tensor(bool)
    ('TopK1', {
        'block': (TopKNet(P.TopK(), 5), {'exception': TypeError, 'error_keywords': ['TopK']}),
        'desc_inputs': [Tensor(np.ones([10]).astype(np.bool_))],
        'skip': ['backward']}),
    # k is not integer
    ('TopK2', {
        'block': (TopKNet(P.TopK(), 5.0), {'exception': TypeError, 'error_keywords': ['TopK']}),
        'desc_inputs': [Tensor(np.ones([10]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('SoftmaxCrossEntropyWithLogits0', {
        'block': (P.SoftmaxCrossEntropyWithLogits(),
                  {'exception': TypeError, 'error_keywords': ['SoftmaxCrossEntropyWithLogits']}),
        'desc_inputs': [5.0, 5.0],
        'skip': ['backward']}),
    # input is Tensor(bool)
    ('SoftmaxCrossEntropyWithLogits1', {
        'block': (P.SoftmaxCrossEntropyWithLogits(),
                  {'exception': TypeError, 'error_keywords': ['SoftmaxCrossEntropyWithLogits']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.bool_)), Tensor(np.ones([5]).astype(np.bool_))],
        'skip': ['backward']}),
    # types of logits and labels mismatch
    ('SoftmaxCrossEntropyWithLogits2', {
        'block': (P.SoftmaxCrossEntropyWithLogits(),
                  {'exception': TypeError, 'error_keywords': ['SoftmaxCrossEntropyWithLogits']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.float16)), Tensor(np.ones([5]).astype(np.float32))],
        'skip': ['backward']}),
    # shapes of logits and labels mismatch
    ('SoftmaxCrossEntropyWithLogits3', {
        'block': (P.SoftmaxCrossEntropyWithLogits(),
                  {'exception': ValueError, 'error_keywords': ['SoftmaxCrossEntropyWithLogits']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.float32)), Tensor(np.ones([3]).astype(np.float32))],
        'skip': ['backward']}),
    # input is scalar
    ('SparseSoftmaxCrossEntropyWithLogits0', {
        'block': (P.SparseSoftmaxCrossEntropyWithLogits(),
                  {'exception': TypeError, 'error_keywords': ['SparseSoftmaxCrossEntropyWithLogits']}),
        'desc_inputs': [5.0, 5.0],
        'skip': ['backward']}),
    # logits is Tensor(bool)
    ('SparseSoftmaxCrossEntropyWithLogits1', {
        'block': (P.SparseSoftmaxCrossEntropyWithLogits(),
                  {'exception': TypeError, 'error_keywords': ['SparseSoftmaxCrossEntropyWithLogits']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.bool_)), Tensor(np.ones([5]).astype(np.bool_))],
        'skip': ['backward']}),
    # labels is Tensor(bool)
    ('SparseSoftmaxCrossEntropyWithLogits2', {
        'block': (P.SparseSoftmaxCrossEntropyWithLogits(),
                  {'exception': TypeError, 'error_keywords': ['SparseSoftmaxCrossEntropyWithLogits']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.float32)), Tensor(np.ones([5]).astype(np.bool_))],
        'skip': ['backward']}),
    # logits_shape[0] != labels_shape[0]
    ('SparseSoftmaxCrossEntropyWithLogits3', {
        'block': (P.SparseSoftmaxCrossEntropyWithLogits(),
                  {'exception': ValueError, 'error_keywords': ['SparseSoftmaxCrossEntropyWithLogits']}),
        'desc_inputs': [Tensor(np.ones([5]).astype(np.float32)), Tensor(np.ones([3]).astype(np.int32))],
        'skip': ['backward']}),
]
@mindspore_test(pipeline_for_compile_forward_ge_graph_for_case_by_case_config_exception)
def test_check_exception():
    """Return the exception cases for the compile-forward test pipeline.

    The decorator runs each entry of ``raise_set`` and verifies the
    configured exception type and error keywords are produced.
    """
    return raise_set