You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

test_var_batch_map.py 21 kB

5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435
  1. # Copyright 2019 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ==============================================================================
  15. import numpy as np
  16. import mindspore.dataset as ds
  17. from mindspore import log as logger
  18. def test_batch_corner_cases():
  19. def gen(num):
  20. for i in range(num):
  21. yield (np.array([i]),)
  22. def test_repeat_batch(gen_num, repeats, batch_size, drop, res):
  23. data1 = ds.GeneratorDataset((lambda: gen(gen_num)), ["num"]).repeat(repeats).batch(batch_size, drop)
  24. for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  25. res.append(item["num"])
  26. def test_batch_repeat(gen_num, repeats, batch_size, drop, res):
  27. data1 = ds.GeneratorDataset((lambda: gen(gen_num)), ["num"]).batch(batch_size, drop).repeat(repeats)
  28. for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  29. res.append(item["num"])
  30. tst1, tst2, tst3, tst4 = [], [], [], []
  31. # case 1 & 2, where batch_size is greater than the entire epoch, with drop equals to both val
  32. test_repeat_batch(gen_num=2, repeats=4, batch_size=7, drop=False, res=tst1)
  33. np.testing.assert_array_equal(np.array([[0], [1], [0], [1], [0], [1], [0]]), tst1[0], "\nATTENTION BATCH FAILED\n")
  34. np.testing.assert_array_equal(np.array([[1]]), tst1[1], "\nATTENTION TEST BATCH FAILED\n")
  35. assert len(tst1) == 2, "\nATTENTION TEST BATCH FAILED\n"
  36. test_repeat_batch(gen_num=2, repeats=4, batch_size=5, drop=True, res=tst2)
  37. np.testing.assert_array_equal(np.array([[0], [1], [0], [1], [0]]), tst2[0], "\nATTENTION BATCH FAILED\n")
  38. assert len(tst2) == 1, "\nATTENTION TEST BATCH FAILED\n"
  39. # case 3 & 4, batch before repeat with different drop
  40. test_batch_repeat(gen_num=5, repeats=2, batch_size=4, drop=True, res=tst3)
  41. np.testing.assert_array_equal(np.array([[0], [1], [2], [3]]), tst3[0], "\nATTENTION BATCH FAILED\n")
  42. np.testing.assert_array_equal(tst3[0], tst3[1], "\nATTENTION BATCH FAILED\n")
  43. assert len(tst3) == 2, "\nATTENTION BATCH FAILED\n"
  44. test_batch_repeat(gen_num=5, repeats=2, batch_size=4, drop=False, res=tst4)
  45. np.testing.assert_array_equal(np.array([[0], [1], [2], [3]]), tst4[0], "\nATTENTION BATCH FAILED\n")
  46. np.testing.assert_array_equal(tst4[0], tst4[2], "\nATTENTION BATCH FAILED\n")
  47. np.testing.assert_array_equal(tst4[1], np.array([[4]]), "\nATTENTION BATCH FAILED\n")
  48. np.testing.assert_array_equal(tst4[1], tst4[3], "\nATTENTION BATCH FAILED\n")
  49. assert len(tst4) == 4, "\nATTENTION BATCH FAILED\n"
# each sub-test in this function is tested twice with exact parameters except that the second test passes each row
# to a pyfunc which makes a deep copy of the row
def test_variable_size_batch():
    """Test callable batch_size (driven by batch number or epoch number)
    in both repeat-then-batch and batch-then-repeat pipelines, with and
    without a per_batch_map that deep-copies each row."""

    def check_res(arr1, arr2):
        # Element-wise np.array_equal over arr1's indices, then a length check.
        # Note: raises IndexError if arr2 is shorter than arr1.
        for ind, _ in enumerate(arr1):
            if not np.array_equal(arr1[ind], np.array(arr2[ind])):
                return False
        return len(arr1) == len(arr2)

    def gen(num):
        # Rows are single-element arrays: [0], [1], ..., [num-1].
        for i in range(num):
            yield (np.array([i]),)

    def add_one_by_batch_num(batchInfo):
        # Batch i (0-based) gets size i + 1: sizes grow 1, 2, 3, ...
        return batchInfo.get_batch_num() + 1

    def add_one_by_epoch(batchInfo):
        # All batches in epoch e (0-based) get size e + 1.
        return batchInfo.get_epoch_num() + 1

    def simple_copy(colList, batchInfo):
        # Identity per_batch_map that deep-copies each row.
        _ = batchInfo
        return ([np.copy(arr) for arr in colList],)

    def test_repeat_batch(gen_num, r, drop, func, res):
        # repeat() before batch(): batches may cross epoch boundaries.
        data1 = ds.GeneratorDataset((lambda: gen(gen_num)), ["num"]).repeat(r).batch(batch_size=func,
                                                                                     drop_remainder=drop)
        for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
            res.append(item["num"])

    # same as test_repeat_batch except each row is passed through via a map which makes a copy of each element
    def test_repeat_batch_with_copy_map(gen_num, r, drop, func):
        res = []
        data1 = ds.GeneratorDataset((lambda: gen(gen_num)), ["num"]).repeat(r) \
            .batch(batch_size=func, drop_remainder=drop, input_columns=["num"], per_batch_map=simple_copy)
        for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
            res.append(item["num"])
        return res

    def test_batch_repeat(gen_num, r, drop, func, res):
        # batch() before repeat(): each epoch batches independently.
        data1 = ds.GeneratorDataset((lambda: gen(gen_num)), ["num"]).batch(batch_size=func, drop_remainder=drop).repeat(
            r)
        for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
            res.append(item["num"])

    # same as test_batch_repeat except each row is passed through via a map which makes a copy of each element
    def test_batch_repeat_with_copy_map(gen_num, r, drop, func):
        res = []
        data1 = ds.GeneratorDataset((lambda: gen(gen_num)), ["num"]) \
            .batch(batch_size=func, drop_remainder=drop, input_columns=["num"], per_batch_map=simple_copy).repeat(r)
        for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
            res.append(item["num"])
        return res

    tst1, tst2, tst3, tst4, tst5, tst6, tst7 = [], [], [], [], [], [], []
    # no repeat, simple var size, based on batch_num
    test_repeat_batch(7, 1, True, add_one_by_batch_num, tst1)
    assert check_res(tst1, [[[0]], [[1], [2]], [[3], [4], [5]]]), "\nATTENTION VAR BATCH FAILED\n"
    assert check_res(tst1, test_repeat_batch_with_copy_map(7, 1, True, add_one_by_batch_num)), "\nMAP FAILED\n"
    test_repeat_batch(9, 1, False, add_one_by_batch_num, tst2)
    assert check_res(tst2, [[[0]], [[1], [2]], [[3], [4], [5]], [[6], [7], [8]]]), "\nATTENTION VAR BATCH FAILED\n"
    assert check_res(tst2, test_repeat_batch_with_copy_map(9, 1, False, add_one_by_batch_num)), "\nMAP FAILED\n"
    # batch after repeat, cross epoch batch
    test_repeat_batch(7, 2, False, add_one_by_batch_num, tst3)
    assert check_res(tst3, [[[0]], [[1], [2]], [[3], [4], [5]], [[6], [0], [1], [2]],
                            [[3], [4], [5], [6]]]), "\nATTENTION VAR BATCH FAILED\n"
    assert check_res(tst3, test_repeat_batch_with_copy_map(7, 2, False, add_one_by_batch_num)), "\nMAP FAILED\n"
    # repeat after batch, no cross epoch batch, remainder dropped
    test_batch_repeat(9, 7, True, add_one_by_batch_num, tst4)
    assert check_res(tst4, [[[0]], [[1], [2]], [[3], [4], [5]]] * 7), "\nATTENTION VAR BATCH FAILED\n"
    # NOTE(review): "\nAMAP FAILED\n" below looks like a typo for "\nMAP FAILED\n"
    # (message-only; left unchanged here).
    assert check_res(tst4, test_batch_repeat_with_copy_map(9, 7, True, add_one_by_batch_num)), "\nAMAP FAILED\n"
    # repeat after batch, no cross epoch batch, remainder kept
    test_batch_repeat(9, 3, False, add_one_by_batch_num, tst5)
    assert check_res(tst5, [[[0]], [[1], [2]], [[3], [4], [5]], [[6], [7], [8]]] * 3), "\nATTENTION VAR BATCH FAILED\n"
    assert check_res(tst5, test_batch_repeat_with_copy_map(9, 3, False, add_one_by_batch_num)), "\nMAP FAILED\n"
    # batch_size based on epoch number, drop
    test_batch_repeat(4, 4, True, add_one_by_epoch, tst6)
    assert check_res(tst6, [[[0]], [[1]], [[2]], [[3]], [[0], [1]], [[2], [3]], [[0], [1], [2]],
                            [[0], [1], [2], [3]]]), "\nATTENTION VAR BATCH FAILED\n"
    assert check_res(tst6, test_batch_repeat_with_copy_map(4, 4, True, add_one_by_epoch)), "\nMAP FAILED\n"
    # batch_size based on epoch number, no drop
    test_batch_repeat(4, 4, False, add_one_by_epoch, tst7)
    assert check_res(tst7, [[[0]], [[1]], [[2]], [[3]], [[0], [1]], [[2], [3]], [[0], [1], [2]], [[3]],
                            [[0], [1], [2], [3]]]), "\nATTENTION VAR BATCH FAILED\n" + str(tst7)
    assert check_res(tst7, test_batch_repeat_with_copy_map(4, 4, False, add_one_by_epoch)), "\nMAP FAILED\n"
  125. def test_basic_batch_map():
  126. def check_res(arr1, arr2):
  127. for ind, _ in enumerate(arr1):
  128. if not np.array_equal(arr1[ind], np.array(arr2[ind])):
  129. return False
  130. return len(arr1) == len(arr2)
  131. def gen(num):
  132. for i in range(num):
  133. yield (np.array([i]),)
  134. def invert_sign_per_epoch(colList, batchInfo):
  135. return ([np.copy(((-1) ** batchInfo.get_epoch_num()) * arr) for arr in colList],)
  136. def invert_sign_per_batch(colList, batchInfo):
  137. return ([np.copy(((-1) ** batchInfo.get_batch_num()) * arr) for arr in colList],)
  138. def batch_map_config(num, r, batch_size, func, res):
  139. data1 = ds.GeneratorDataset((lambda: gen(num)), ["num"]) \
  140. .batch(batch_size=batch_size, input_columns=["num"], per_batch_map=func).repeat(r)
  141. for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  142. res.append(item["num"])
  143. tst1, tst2, = [], []
  144. batch_map_config(4, 2, 2, invert_sign_per_epoch, tst1)
  145. assert check_res(tst1, [[[0], [1]], [[2], [3]], [[0], [-1]], [[-2], [-3]]]), "\nATTENTION MAP BATCH FAILED\n" + str(
  146. tst1)
  147. # each batch, the sign of a row is changed, test map is corrected performed according to its batch_num
  148. batch_map_config(4, 2, 2, invert_sign_per_batch, tst2)
  149. assert check_res(tst2,
  150. [[[0], [1]], [[-2], [-3]], [[0], [1]], [[-2], [-3]]]), "\nATTENTION MAP BATCH FAILED\n" + str(tst2)
def test_batch_multi_col_map():
    """per_batch_map over multiple input columns: verify column selection,
    the order input_columns maps onto the function's arguments, and that
    unselected columns pass through unchanged."""

    def check_res(arr1, arr2):
        # Element-wise np.array_equal over arr1's indices, then a length check.
        for ind, _ in enumerate(arr1):
            if not np.array_equal(arr1[ind], np.array(arr2[ind])):
                return False
        return len(arr1) == len(arr2)

    def gen(num):
        # Two columns per row: i and i squared.
        for i in range(num):
            yield (np.array([i]), np.array([i ** 2]))

    def col1_col2_add_num(col1, col2, batchInfo):
        # Adds 100 to the first mapped column and 300 to the second; which
        # dataset column each lands on depends on the input_columns order.
        _ = batchInfo
        return ([[np.copy(arr + 100) for arr in col1],
                 [np.copy(arr + 300) for arr in col2]])

    def invert_sign_per_batch(colList, batchInfo):
        # Odd batches negate the single mapped column.
        return ([np.copy(((-1) ** batchInfo.get_batch_num()) * arr) for arr in colList],)

    def invert_sign_per_batch_multi_col(col1, col2, batchInfo):
        # Odd batches negate both mapped columns.
        return ([np.copy(((-1) ** batchInfo.get_batch_num()) * arr) for arr in col1],
                [np.copy(((-1) ** batchInfo.get_batch_num()) * arr) for arr in col2])

    def batch_map_config(num, r, batch_size, func, col_names, res):
        # Build the pipeline and collect [num, num_square] pairs per batch.
        data1 = ds.GeneratorDataset((lambda: gen(num)), ["num", "num_square"]) \
            .batch(batch_size=batch_size, input_columns=col_names, per_batch_map=func).repeat(r)
        for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
            res.append(np.array([item["num"], item["num_square"]]))

    tst1, tst2, tst3, tst4 = [], [], [], []
    # map applied to "num_square" only; "num" passes through untouched
    batch_map_config(4, 2, 2, invert_sign_per_batch, ["num_square"], tst1)
    assert check_res(tst1, [[[[0], [1]], [[0], [1]]], [[[2], [3]], [[-4], [-9]]], [[[0], [1]], [[0], [1]]],
                            [[[2], [3]], [[-4], [-9]]]]), "\nATTENTION MAP BATCH FAILED\n" + str(tst1)
    batch_map_config(4, 2, 2, invert_sign_per_batch_multi_col, ["num", "num_square"], tst2)
    assert check_res(tst2, [[[[0], [1]], [[0], [1]]], [[[-2], [-3]], [[-4], [-9]]], [[[0], [1]], [[0], [1]]],
                            [[[-2], [-3]], [[-4], [-9]]]]), "\nATTENTION MAP BATCH FAILED\n" + str(tst2)
    # the two tests below verify the order of the map.
    # num_square column adds 100, num column adds 300.
    batch_map_config(4, 3, 2, col1_col2_add_num, ["num_square", "num"], tst3)
    assert check_res(tst3, [[[[300], [301]], [[100], [101]]],
                            [[[302], [303]], [[104], [109]]]] * 3), "\nATTENTION MAP BATCH FAILED\n" + str(tst3)
    # num column adds 100, num_square column adds 300.
    batch_map_config(4, 3, 2, col1_col2_add_num, ["num", "num_square"], tst4)
    assert check_res(tst4, [[[[100], [101]], [[300], [301]]],
                            [[[102], [103]], [[304], [309]]]] * 3), "\nATTENTION MAP BATCH FAILED\n" + str(tst4)
def test_var_batch_multi_col_map():
    """Combine a callable batch_size (growing with both batch and epoch
    number) with a two-column per_batch_map; col3 is left untouched."""

    def check_res(arr1, arr2):
        # Element-wise np.array_equal over arr1's indices, then a length check.
        for ind, _ in enumerate(arr1):
            if not np.array_equal(arr1[ind], np.array(arr2[ind])):
                return False
        return len(arr1) == len(arr2)

    # gen 3 columns
    # first column:  0, 3, 6, 9 ...
    # second column: 1, 4, 7, 10 ...
    # third column:  2, 5, 8, 11 ...
    def gen_3_cols(num):
        for i in range(num):
            yield (np.array([i * 3]), np.array([i * 3 + 1]), np.array([i * 3 + 2]))

    # first epoch batch_size per batch:  1, 2, 3 ...
    # second epoch batch_size per batch: 2, 4, 6 ...
    # third epoch batch_size per batch:  3, 6, 9 ...
    def batch_func(batchInfo):
        return (batchInfo.get_batch_num() + 1) * (batchInfo.get_epoch_num() + 1)

    # multiply first col by (batch_num + 1), multiply second col by -(batch_num + 1)
    def map_func(col1, col2, batchInfo):
        return ([np.copy((1 + batchInfo.get_batch_num()) * arr) for arr in col1],
                [np.copy(-(1 + batchInfo.get_batch_num()) * arr) for arr in col2])

    def batch_map_config(num, r, fbatch, fmap, col_names, res):
        # Collect [col1, col2, col3] triples for each produced batch.
        data1 = ds.GeneratorDataset((lambda: gen_3_cols(num)), ["col1", "col2", "col3"]) \
            .batch(batch_size=fbatch, input_columns=col_names, per_batch_map=fmap).repeat(r)
        for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
            res.append(np.array([item["col1"], item["col2"], item["col3"]]))

    tst1 = []
    tst1_res = [[[[0]], [[-1]], [[2]]], [[[6], [12]], [[-8], [-14]], [[5], [8]]],
                [[[27], [36], [45]], [[-30], [-39], [-48]], [[11], [14], [17]]],
                [[[72], [84], [96], [108]], [[-76], [-88], [-100], [-112]], [[20], [23], [26], [29]]]]
    batch_map_config(10, 1, batch_func, map_func, ["col1", "col2"], tst1)
    assert check_res(tst1, tst1_res), "test_var_batch_multi_col_map FAILED"
def test_var_batch_var_resize():
    """Crop decoded images via per_batch_map where the target size grows
    with the batch number; requires ../data/dataset/testPK/data/ on disk."""

    # fake resize image according to its batch number; for the 5-th batch, crop to (5^2, 5^2) = (25, 25)
    def np_psedo_resize(col, batchInfo):
        s = (batchInfo.get_batch_num() + 1) ** 2
        # Crop the top-left s x s region of each HWC image.
        return ([np.copy(c[0:s, 0:s, :]) for c in col],)

    # batch i (1-based) holds i rows.
    def add_one(batchInfo):
        return batchInfo.get_batch_num() + 1

    data1 = ds.ImageFolderDataset("../data/dataset/testPK/data/", num_parallel_workers=4, decode=True)
    data1 = data1.batch(batch_size=add_one, drop_remainder=True, input_columns=["image"], per_batch_map=np_psedo_resize)
    # i-th batch has shape [i, i^2, i^2, 3]
    i = 1
    for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
        assert item["image"].shape == (i, i ** 2, i ** 2, 3), "\ntest_var_batch_var_resize FAILED\n"
        i += 1
  237. def test_exception():
  238. def gen(num):
  239. for i in range(num):
  240. yield (np.array([i]),)
  241. def bad_batch_size(batchInfo):
  242. raise StopIteration
  243. # return batchInfo.get_batch_num()
  244. def bad_map_func(col, batchInfo):
  245. raise StopIteration
  246. # return (col,)
  247. data1 = ds.GeneratorDataset((lambda: gen(100)), ["num"]).batch(bad_batch_size)
  248. try:
  249. for _ in data1.create_dict_iterator(num_epochs=1):
  250. pass
  251. assert False
  252. except RuntimeError:
  253. pass
  254. data2 = ds.GeneratorDataset((lambda: gen(100)), ["num"]).batch(4, input_columns=["num"], per_batch_map=bad_map_func)
  255. try:
  256. for _ in data2.create_dict_iterator(num_epochs=1):
  257. pass
  258. assert False
  259. except RuntimeError:
  260. pass
def test_multi_col_map():
    """Exercise input_columns/output_columns/column_order combinations of
    batch(): column split, merge, swap, reordering, and the error paths."""

    def gen_2_cols(num):
        # Rows are (i, i^2) for i = 1..num.
        for i in range(1, 1 + num):
            yield (np.array([i]), np.array([i ** 2]))

    def split_col(col, batchInfo):
        # One input column -> two output columns: (value, -value).
        return ([np.copy(arr) for arr in col], [np.copy(-arr) for arr in col])

    def merge_col(col1, col2, batchInfo):
        # Two input columns -> one output column (element-wise sum).
        merged = []
        for k, v in enumerate(col1):
            merged.append(np.array(v + col2[k]))
        return (merged,)

    def swap_col(col1, col2, batchInfo):
        # Return the two columns in reversed order.
        return ([np.copy(a) for a in col2], [np.copy(b) for b in col1])

    def batch_map_config(num, s, f, in_nms, out_nms, col_order=None):
        # Run the pipeline and return the result rows; on failure return
        # the exception text instead so callers can assert on substrings.
        try:
            dst = ds.GeneratorDataset((lambda: gen_2_cols(num)), ["col1", "col2"])
            dst = dst.batch(batch_size=s, input_columns=in_nms, output_columns=out_nms, per_batch_map=f,
                            column_order=col_order)
            res = []
            for row in dst.create_dict_iterator(num_epochs=1, output_numpy=True):
                res.append(row)
            return res
        except (ValueError, RuntimeError, TypeError) as e:
            return str(e)

    # split 1 col into 2 cols
    res = batch_map_config(2, 2, split_col, ["col2"], ["col_x", "col_y"])[0]
    assert np.array_equal(res["col1"], [[1], [2]])
    assert np.array_equal(res["col_x"], [[1], [4]]) and np.array_equal(res["col_y"], [[-1], [-4]])
    # merge 2 cols into 1 col
    res = batch_map_config(4, 4, merge_col, ["col1", "col2"], ["merged"])[0]
    assert np.array_equal(res["merged"], [[2], [6], [12], [20]])
    # swap once
    res = batch_map_config(3, 3, swap_col, ["col1", "col2"], ["col1", "col2"])[0]
    assert np.array_equal(res["col1"], [[1], [4], [9]]) and np.array_equal(res["col2"], [[1], [2], [3]])
    # swap twice
    res = batch_map_config(3, 3, swap_col, ["col1", "col2"], ["col2", "col1"])[0]
    assert np.array_equal(res["col2"], [[1], [4], [9]]) and np.array_equal(res["col1"], [[1], [2], [3]])
    # test project after map
    res = batch_map_config(2, 2, split_col, ["col2"], ["col_x", "col_y"], ["col_x", "col_y", "col1"])[0]
    assert list(res.keys()) == ["col_x", "col_y", "col1"]
    # test the insertion order is maintained
    res = batch_map_config(2, 2, split_col, ["col2"], ["col_x", "col_y"], ["col1", "col_x", "col_y"])[0]
    assert list(res.keys()) == ["col1", "col_x", "col_y"]
    # test exceptions
    assert "output_columns with value 233 is not of type" in batch_map_config(2, 2, split_col, ["col2"], 233)
    assert "column_order with value 233 is not of type" in batch_map_config(2, 2, split_col, ["col2"], ["col1"], 233)
    assert "output_columns is NOT set correctly" in batch_map_config(2, 2, split_col, ["col2"], ["col1"])
    assert "Incorrect number of columns" in batch_map_config(2, 2, split_col, ["col2"], ["col3", "col4", "col5"])
    assert "col-1 doesn't exist" in batch_map_config(2, 2, split_col, ["col-1"], ["col_x", "col_y"])
  310. def test_exceptions_2():
  311. def gen(num):
  312. for i in range(num):
  313. yield (np.array([i]),)
  314. def simple_copy(colList, batchInfo):
  315. return ([np.copy(arr) for arr in colList],)
  316. def concat_copy(colList, batchInfo):
  317. # this will duplicate the number of rows returned, which would be wrong!
  318. return ([np.copy(arr) for arr in colList] * 2,)
  319. def shrink_copy(colList, batchInfo):
  320. # this will duplicate the number of rows returned, which would be wrong!
  321. return ([np.copy(arr) for arr in colList][0:int(len(colList) / 2)],)
  322. def test_exceptions_config(gen_num, batch_size, in_cols, per_batch_map):
  323. data1 = ds.GeneratorDataset((lambda: gen(gen_num)), ["num"]).batch(batch_size, input_columns=in_cols,
  324. per_batch_map=per_batch_map)
  325. try:
  326. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  327. pass
  328. return "success"
  329. except RuntimeError as e:
  330. return str(e)
  331. # test exception where column name is incorrect
  332. assert "error. col:num1 doesn't exist" in test_exceptions_config(4, 2, ["num1"], simple_copy)
  333. assert "expects: 2 rows returned from per_batch_map, gets: 4" in test_exceptions_config(4, 2, ["num"], concat_copy)
  334. assert "expects: 4 rows returned from per_batch_map, gets: 2" in test_exceptions_config(4, 4, ["num"], shrink_copy)
  335. if __name__ == '__main__':
  336. logger.info("Running test_var_batch_map.py test_batch_corner_cases() function")
  337. test_batch_corner_cases()
  338. logger.info("Running test_var_batch_map.py test_variable_size_batch() function")
  339. test_variable_size_batch()
  340. logger.info("Running test_var_batch_map.py test_basic_batch_map() function")
  341. test_basic_batch_map()
  342. logger.info("Running test_var_batch_map.py test_batch_multi_col_map() function")
  343. test_batch_multi_col_map()
  344. logger.info("Running test_var_batch_map.py tesgit t_var_batch_multi_col_map() function")
  345. test_var_batch_multi_col_map()
  346. logger.info("Running test_var_batch_map.py test_var_batch_var_resize() function")
  347. test_var_batch_var_resize()
  348. logger.info("Running test_var_batch_map.py test_exception() function")
  349. test_exception()
  350. logger.info("Running test_var_batch_map.py test_multi_col_map() function")
  351. test_multi_col_map()
  352. logger.info("Running test_var_batch_map.py test_exceptions_2() function")
  353. test_exceptions_2()