You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

test_repeat.py 10 kB

5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342
  1. # Copyright 2019 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ==============================================================================
  15. """
  16. Test Repeat Op
  17. """
  18. import numpy as np
  19. import pytest
  20. import mindspore.dataset as ds
  21. import mindspore.dataset.vision.c_transforms as vision
  22. from mindspore import log as logger
  23. from util import save_and_check_dict
# TFRecord data file(s) and matching JSON schema used by the basic repeat tests.
DATA_DIR_TF = ["../data/dataset/testTFTestAllTypes/test.data"]
SCHEMA_DIR_TF = "../data/dataset/testTFTestAllTypes/datasetSchema.json"
# TFRecord data file(s) with image rows (size asserted as 3 below) and schema.
DATA_DIR_TF2 = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"]
SCHEMA_DIR_TF2 = "../data/dataset/test_tf_file_3_images/datasetSchema.json"
# When True, save_and_check_dict regenerates the golden .npz files
# instead of comparing pipeline output against them.
GENERATE_GOLDEN = False
  29. def test_tf_repeat_01():
  30. """
  31. Test a simple repeat operation.
  32. """
  33. logger.info("Test Simple Repeat")
  34. # define parameters
  35. repeat_count = 2
  36. # apply dataset operations
  37. data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
  38. data1 = data1.repeat(repeat_count)
  39. filename = "repeat_result.npz"
  40. save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)
  41. def test_tf_repeat_02():
  42. """
  43. Test Infinite Repeat.
  44. """
  45. logger.info("Test Infinite Repeat")
  46. # define parameters
  47. repeat_count = -1
  48. # apply dataset operations
  49. data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
  50. data1 = data1.repeat(repeat_count)
  51. itr = 0
  52. for _ in data1:
  53. itr = itr + 1
  54. if itr == 100:
  55. break
  56. assert itr == 100
  57. def test_tf_repeat_03():
  58. """
  59. Test Repeat then Batch.
  60. """
  61. logger.info("Test Repeat then Batch")
  62. data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False)
  63. batch_size = 32
  64. resize_height, resize_width = 32, 32
  65. decode_op = vision.Decode()
  66. resize_op = vision.Resize((resize_height, resize_width), interpolation=ds.transforms.vision.Inter.LINEAR)
  67. data1 = data1.map(operations=decode_op, input_columns=["image"])
  68. data1 = data1.map(operations=resize_op, input_columns=["image"])
  69. data1 = data1.repeat(22)
  70. data1 = data1.batch(batch_size, drop_remainder=True)
  71. num_iter = 0
  72. for _ in data1.create_dict_iterator(num_epochs=1):
  73. num_iter += 1
  74. logger.info("Number of tf data in data1: {}".format(num_iter))
  75. assert num_iter == 2
  76. def test_tf_repeat_04():
  77. """
  78. Test a simple repeat operation with column list.
  79. """
  80. logger.info("Test Simple Repeat Column List")
  81. # define parameters
  82. repeat_count = 2
  83. columns_list = ["col_sint64", "col_sint32"]
  84. # apply dataset operations
  85. data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, columns_list=columns_list, shuffle=False)
  86. data1 = data1.repeat(repeat_count)
  87. filename = "repeat_list_result.npz"
  88. save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)
  89. def generator():
  90. for i in range(3):
  91. (yield np.array([i]),)
  92. def test_nested_repeat1():
  93. logger.info("test_nested_repeat1")
  94. data = ds.GeneratorDataset(generator, ["data"])
  95. data = data.repeat(2)
  96. data = data.repeat(3)
  97. for i, d in enumerate(data.create_tuple_iterator(output_numpy=True)):
  98. assert i % 3 == d[0][0]
  99. assert sum([1 for _ in data]) == 2 * 3 * 3
  100. def test_nested_repeat2():
  101. logger.info("test_nested_repeat2")
  102. data = ds.GeneratorDataset(generator, ["data"])
  103. data = data.repeat(1)
  104. data = data.repeat(1)
  105. for i, d in enumerate(data.create_tuple_iterator(output_numpy=True)):
  106. assert i % 3 == d[0][0]
  107. assert sum([1 for _ in data]) == 3
  108. def test_nested_repeat3():
  109. logger.info("test_nested_repeat3")
  110. data = ds.GeneratorDataset(generator, ["data"])
  111. data = data.repeat(1)
  112. data = data.repeat(2)
  113. for i, d in enumerate(data.create_tuple_iterator(output_numpy=True)):
  114. assert i % 3 == d[0][0]
  115. assert sum([1 for _ in data]) == 2 * 3
  116. def test_nested_repeat4():
  117. logger.info("test_nested_repeat4")
  118. data = ds.GeneratorDataset(generator, ["data"])
  119. data = data.repeat(2)
  120. data = data.repeat(1)
  121. for i, d in enumerate(data.create_tuple_iterator(output_numpy=True)):
  122. assert i % 3 == d[0][0]
  123. assert sum([1 for _ in data]) == 2 * 3
  124. def test_nested_repeat5():
  125. logger.info("test_nested_repeat5")
  126. data = ds.GeneratorDataset(generator, ["data"])
  127. data = data.batch(3)
  128. data = data.repeat(2)
  129. data = data.repeat(3)
  130. for _, d in enumerate(data):
  131. np.testing.assert_array_equal(d[0].asnumpy(), np.asarray([[0], [1], [2]]))
  132. assert sum([1 for _ in data]) == 6
  133. def test_nested_repeat6():
  134. logger.info("test_nested_repeat6")
  135. data = ds.GeneratorDataset(generator, ["data"])
  136. data = data.repeat(2)
  137. data = data.batch(3)
  138. data = data.repeat(3)
  139. for _, d in enumerate(data):
  140. np.testing.assert_array_equal(d[0].asnumpy(), np.asarray([[0], [1], [2]]))
  141. assert sum([1 for _ in data]) == 6
  142. def test_nested_repeat7():
  143. logger.info("test_nested_repeat7")
  144. data = ds.GeneratorDataset(generator, ["data"])
  145. data = data.repeat(2)
  146. data = data.repeat(3)
  147. data = data.batch(3)
  148. for _, d in enumerate(data):
  149. np.testing.assert_array_equal(d[0].asnumpy(), np.asarray([[0], [1], [2]]))
  150. assert sum([1 for _ in data]) == 6
  151. def test_nested_repeat8():
  152. logger.info("test_nested_repeat8")
  153. data = ds.GeneratorDataset(generator, ["data"])
  154. data = data.batch(2, drop_remainder=False)
  155. data = data.repeat(2)
  156. data = data.repeat(3)
  157. for i, d in enumerate(data):
  158. if i % 2 == 0:
  159. np.testing.assert_array_equal(d[0].asnumpy(), np.asarray([[0], [1]]))
  160. else:
  161. np.testing.assert_array_equal(d[0].asnumpy(), np.asarray([[2]]))
  162. assert sum([1 for _ in data]) == 6 * 2
  163. def test_nested_repeat9():
  164. logger.info("test_nested_repeat9")
  165. data = ds.GeneratorDataset(generator, ["data"])
  166. data = data.repeat()
  167. data = data.repeat(3)
  168. for i, d in enumerate(data):
  169. assert i % 3 == d[0].asnumpy()[0]
  170. if i == 10:
  171. break
  172. def test_nested_repeat10():
  173. logger.info("test_nested_repeat10")
  174. data = ds.GeneratorDataset(generator, ["data"])
  175. data = data.repeat(3)
  176. data = data.repeat()
  177. for i, d in enumerate(data):
  178. assert i % 3 == d[0].asnumpy()[0]
  179. if i == 10:
  180. break
  181. def test_nested_repeat11():
  182. logger.info("test_nested_repeat11")
  183. data = ds.GeneratorDataset(generator, ["data"])
  184. data = data.repeat(2)
  185. data = data.repeat(3)
  186. data = data.repeat(4)
  187. data = data.repeat(5)
  188. for i, d in enumerate(data):
  189. assert i % 3 == d[0].asnumpy()[0]
  190. assert sum([1 for _ in data]) == 2 * 3 * 4 * 5 * 3
  191. def test_repeat_count1():
  192. data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False)
  193. data1_size = data1.get_dataset_size()
  194. logger.info("dataset size is {}".format(data1_size))
  195. batch_size = 2
  196. repeat_count = 4
  197. resize_height, resize_width = 32, 32
  198. decode_op = vision.Decode()
  199. resize_op = vision.Resize((resize_height, resize_width), interpolation=ds.transforms.vision.Inter.LINEAR)
  200. data1 = data1.map(operations=decode_op, input_columns=["image"])
  201. data1 = data1.map(operations=resize_op, input_columns=["image"])
  202. data1 = data1.repeat(repeat_count)
  203. data1 = data1.batch(batch_size, drop_remainder=False)
  204. dataset_size = data1.get_dataset_size()
  205. logger.info("dataset repeat then batch's size is {}".format(dataset_size))
  206. num1_iter = 0
  207. for _ in data1.create_dict_iterator(num_epochs=1):
  208. num1_iter += 1
  209. assert data1_size == 3
  210. assert dataset_size == num1_iter == 6
  211. def test_repeat_count2():
  212. data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False)
  213. data1_size = data1.get_dataset_size()
  214. logger.info("dataset size is {}".format(data1_size))
  215. batch_size = 2
  216. repeat_count = 4
  217. resize_height, resize_width = 32, 32
  218. decode_op = vision.Decode()
  219. resize_op = vision.Resize((resize_height, resize_width), interpolation=ds.transforms.vision.Inter.LINEAR)
  220. data1 = data1.map(operations=decode_op, input_columns=["image"])
  221. data1 = data1.map(operations=resize_op, input_columns=["image"])
  222. data1 = data1.batch(batch_size, drop_remainder=False)
  223. data1 = data1.repeat(repeat_count)
  224. dataset_size = data1.get_dataset_size()
  225. logger.info("dataset batch then repeat's size is {}".format(dataset_size))
  226. num1_iter = 0
  227. for _ in data1.create_dict_iterator(num_epochs=1):
  228. num1_iter += 1
  229. assert data1_size == 3
  230. assert dataset_size == num1_iter == 8
  231. def test_repeat_count0():
  232. """
  233. Test Repeat with invalid count 0.
  234. """
  235. logger.info("Test Repeat with invalid count 0")
  236. with pytest.raises(ValueError) as info:
  237. data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False)
  238. data1.repeat(0)
  239. assert "count" in str(info.value)
  240. def test_repeat_countneg2():
  241. """
  242. Test Repeat with invalid count -2.
  243. """
  244. logger.info("Test Repeat with invalid count -2")
  245. with pytest.raises(ValueError) as info:
  246. data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False)
  247. data1.repeat(-2)
  248. assert "count" in str(info.value)
  249. if __name__ == "__main__":
  250. test_tf_repeat_01()
  251. test_tf_repeat_02()
  252. test_tf_repeat_03()
  253. test_tf_repeat_04()
  254. test_nested_repeat1()
  255. test_nested_repeat2()
  256. test_nested_repeat3()
  257. test_nested_repeat4()
  258. test_nested_repeat5()
  259. test_nested_repeat6()
  260. test_nested_repeat7()
  261. test_nested_repeat8()
  262. test_nested_repeat9()
  263. test_nested_repeat10()
  264. test_nested_repeat11()
  265. test_repeat_count1()
  266. test_repeat_count2()
  267. test_repeat_count0()
  268. test_repeat_countneg2()