test_repeat.py

# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
from util import save_and_check

import mindspore.dataset as ds
import mindspore.dataset.transforms.vision.c_transforms as vision
from mindspore import log as logger

DATA_DIR_TF = ["../data/dataset/testTFTestAllTypes/test.data"]
SCHEMA_DIR_TF = "../data/dataset/testTFTestAllTypes/datasetSchema.json"
COLUMNS_TF = ["col_1d", "col_2d", "col_3d", "col_binary", "col_float",
              "col_sint16", "col_sint32", "col_sint64"]
GENERATE_GOLDEN = False

IMG_DATA_DIR = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"]
IMG_SCHEMA_DIR = "../data/dataset/test_tf_file_3_images/datasetSchema.json"

DATA_DIR_TF2 = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"]
SCHEMA_DIR_TF2 = "../data/dataset/test_tf_file_3_images/datasetSchema.json"

def test_tf_repeat_01():
    """
    Test a simple repeat operation.
    """
    logger.info("Test Simple Repeat")
    # define parameters
    repeat_count = 2
    parameters = {"params": {'repeat_count': repeat_count}}

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    data1 = data1.repeat(repeat_count)

    filename = "repeat_result.npz"
    save_and_check(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)

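# A repeat count of -1 means the dataset is replayed indefinitely, so the test
# below never lets the iterator exhaust itself and instead breaks out manually
# after a fixed number of rows.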
def test_tf_repeat_02():
    """
    Test Infinite Repeat.
    """
    logger.info("Test Infinite Repeat")
    # define parameters
    repeat_count = -1
    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    data1 = data1.repeat(repeat_count)

    itr = 0
    for _ in data1:
        itr = itr + 1
        if itr == 100:
            break
    assert itr == 100

def test_tf_repeat_03():
    """
    Test Repeat then Batch.
    """
    logger.info("Test Repeat then Batch")
    data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False)

    batch_size = 32
    resize_height, resize_width = 32, 32
    decode_op = vision.Decode()
    resize_op = vision.Resize((resize_height, resize_width), interpolation=ds.transforms.vision.Inter.LINEAR)
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=resize_op)

    data1 = data1.repeat(22)
    data1 = data1.batch(batch_size, drop_remainder=True)

    num_iter = 0
    for _ in data1.create_dict_iterator():
        num_iter += 1
    logger.info("Number of tf data in data1: {}".format(num_iter))
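    # 3 images x 22 repeats = 66 rows; batch(32, drop_remainder=True) keeps only the 2 full batches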
    assert num_iter == 2

def test_tf_repeat_04():
    """
    Test a simple repeat operation with column list.
    """
    logger.info("Test Simple Repeat Column List")
    # define parameters
    repeat_count = 2
    parameters = {"params": {'repeat_count': repeat_count}}
    columns_list = ["col_sint64", "col_sint32"]

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, columns_list=columns_list, shuffle=False)
    data1 = data1.repeat(repeat_count)

    filename = "repeat_list_result.npz"
    save_and_check(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)

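# Three-row source shared by the nested repeat tests below; each row is a
# single-element NumPy array holding its own index (0, 1, 2).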
def generator():
    for i in range(3):
        yield (np.array([i]),)

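# The tests below chain multiple .repeat() calls and check that the counts
# compose multiplicatively: repeat(2) followed by repeat(3) replays the 3-row
# source 2 * 3 = 6 times, and the row values still cycle 0, 1, 2.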
def test_nested_repeat1():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.repeat(2)
    data = data.repeat(3)
    for i, d in enumerate(data):
        assert i % 3 == d[0][0]

    assert sum([1 for _ in data]) == 2 * 3 * 3


def test_nested_repeat2():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.repeat(1)
    data = data.repeat(1)
    for i, d in enumerate(data):
        assert i % 3 == d[0][0]

    assert sum([1 for _ in data]) == 3


def test_nested_repeat3():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.repeat(1)
    data = data.repeat(2)
    for i, d in enumerate(data):
        assert i % 3 == d[0][0]

    assert sum([1 for _ in data]) == 2 * 3


def test_nested_repeat4():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.repeat(2)
    data = data.repeat(1)
    for i, d in enumerate(data):
        assert i % 3 == d[0][0]

    assert sum([1 for _ in data]) == 2 * 3

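# The next four tests interleave batch() with the nested repeats: whether
# batch(3) is applied before, between, or after the two repeats, the pipeline
# still produces 2 * 3 = 6 identical batches, and with batch(2,
# drop_remainder=False) every pass keeps its trailing partial batch.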
def test_nested_repeat5():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.batch(3)
    data = data.repeat(2)
    data = data.repeat(3)
    for _, d in enumerate(data):
        assert np.array_equal(d[0], np.asarray([[0], [1], [2]]))

    assert sum([1 for _ in data]) == 6


def test_nested_repeat6():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.repeat(2)
    data = data.batch(3)
    data = data.repeat(3)
    for _, d in enumerate(data):
        assert np.array_equal(d[0], np.asarray([[0], [1], [2]]))

    assert sum([1 for _ in data]) == 6


def test_nested_repeat7():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.repeat(2)
    data = data.repeat(3)
    data = data.batch(3)
    for _, d in enumerate(data):
        assert np.array_equal(d[0], np.asarray([[0], [1], [2]]))

    assert sum([1 for _ in data]) == 6


def test_nested_repeat8():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.batch(2, drop_remainder=False)
    data = data.repeat(2)
    data = data.repeat(3)
    for i, d in enumerate(data):
        if i % 2 == 0:
            assert np.array_equal(d[0], np.asarray([[0], [1]]))
        else:
            assert np.array_equal(d[0], np.asarray([[2]]))

    assert sum([1 for _ in data]) == 6 * 2

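# repeat() with no count never exhausts on its own, so the two tests below only
# sample the first few rows and break out manually, checking that the values
# still cycle 0, 1, 2 when an infinite repeat is nested with a finite one in
# either order.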
def test_nested_repeat9():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.repeat()
    data = data.repeat(3)
    for i, d in enumerate(data):
        assert i % 3 == d[0][0]
        if i == 10:
            break


def test_nested_repeat10():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.repeat(3)
    data = data.repeat()
    for i, d in enumerate(data):
        assert i % 3 == d[0][0]
        if i == 10:
            break

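# Four chained finite repeats compose the same way: 2 * 3 * 4 * 5 = 120 passes
# over the 3-row source, i.e. 360 rows in total.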
def test_nested_repeat11():
    data = ds.GeneratorDataset(generator, ["data"])
    data = data.repeat(2)
    data = data.repeat(3)
    data = data.repeat(4)
    data = data.repeat(5)
    for i, d in enumerate(data):
        assert i % 3 == d[0][0]

    assert sum([1 for _ in data]) == 2 * 3 * 4 * 5 * 3

if __name__ == "__main__":
    test_tf_repeat_01()
    test_tf_repeat_02()
    test_tf_repeat_03()
    test_tf_repeat_04()

    test_nested_repeat1()
    test_nested_repeat2()
    test_nested_repeat3()
    test_nested_repeat4()
    test_nested_repeat5()
    test_nested_repeat6()
    test_nested_repeat7()
    test_nested_repeat8()
    test_nested_repeat9()
    test_nested_repeat10()
    test_nested_repeat11()