You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

test_skip.py 6.2 kB

5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ==============================================================================
  15. import numpy as np
  16. import pytest
  17. import mindspore.dataset as ds
  18. import mindspore.dataset.vision.c_transforms as vision
  19. DATA_DIR_TF2 = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"]
  20. SCHEMA_DIR_TF2 = "../data/dataset/test_tf_file_3_images/datasetSchema.json"
  21. def test_tf_skip():
  22. """
  23. a simple skip operation.
  24. """
  25. data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False)
  26. resize_height, resize_width = 32, 32
  27. decode_op = vision.Decode()
  28. resize_op = vision.Resize((resize_height, resize_width), interpolation=ds.transforms.vision.Inter.LINEAR)
  29. data1 = data1.map(operations=decode_op, input_columns=["image"])
  30. data1 = data1.map(operations=resize_op, input_columns=["image"])
  31. data1 = data1.skip(2)
  32. num_iter = 0
  33. for _ in data1.create_dict_iterator(num_epochs=1):
  34. num_iter += 1
  35. assert num_iter == 1
  36. def generator_md():
  37. """
  38. create a dataset with [0, 1, 2, 3, 4]
  39. """
  40. for i in range(5):
  41. yield (np.array([i]),)
  42. def test_generator_skip():
  43. ds1 = ds.GeneratorDataset(generator_md, ["data"], num_parallel_workers=4)
  44. # Here ds1 should be [3, 4]
  45. ds1 = ds1.skip(3)
  46. buf = []
  47. for data in ds1.create_tuple_iterator(output_numpy=True):
  48. buf.append(data[0][0])
  49. assert len(buf) == 2
  50. assert buf == [3, 4]
  51. def test_skip_1():
  52. ds1 = ds.GeneratorDataset(generator_md, ["data"])
  53. # Here ds1 should be []
  54. ds1 = ds1.skip(7)
  55. buf = []
  56. for data in ds1.create_tuple_iterator(output_numpy=True):
  57. buf.append(data[0][0])
  58. assert buf == []
  59. def test_skip_2():
  60. ds1 = ds.GeneratorDataset(generator_md, ["data"])
  61. # Here ds1 should be [0, 1, 2, 3, 4]
  62. ds1 = ds1.skip(0)
  63. buf = []
  64. for data in ds1.create_tuple_iterator(output_numpy=True):
  65. buf.append(data[0][0])
  66. assert len(buf) == 5
  67. assert buf == [0, 1, 2, 3, 4]
  68. def test_skip_repeat_1():
  69. ds1 = ds.GeneratorDataset(generator_md, ["data"])
  70. # Here ds1 should be [0, 1, 2, 3, 4, 0, 1, 2, 3, 4]
  71. ds1 = ds1.repeat(2)
  72. # Here ds1 should be [3, 4, 0, 1, 2, 3, 4]
  73. ds1 = ds1.skip(3)
  74. buf = []
  75. for data in ds1.create_tuple_iterator(output_numpy=True):
  76. buf.append(data[0][0])
  77. assert len(buf) == 7
  78. assert buf == [3, 4, 0, 1, 2, 3, 4]
  79. def test_skip_repeat_2():
  80. ds1 = ds.GeneratorDataset(generator_md, ["data"])
  81. # Here ds1 should be [3, 4]
  82. ds1 = ds1.skip(3)
  83. # Here ds1 should be [3, 4, 3, 4]
  84. ds1 = ds1.repeat(2)
  85. buf = []
  86. for data in ds1.create_tuple_iterator(output_numpy=True):
  87. buf.append(data[0][0])
  88. assert len(buf) == 4
  89. assert buf == [3, 4, 3, 4]
  90. def test_skip_repeat_3():
  91. ds1 = ds.GeneratorDataset(generator_md, ["data"])
  92. # Here ds1 should be [0, 1, 2, 3, 4, 0, 1, 2, 3, 4]
  93. ds1 = ds1.repeat(2)
  94. # Here ds1 should be [3, 4]
  95. ds1 = ds1.skip(8)
  96. # Here ds1 should be [3, 4, 3, 4, 3, 4]
  97. ds1 = ds1.repeat(3)
  98. buf = []
  99. for data in ds1.create_tuple_iterator(output_numpy=True):
  100. buf.append(data[0][0])
  101. assert len(buf) == 6
  102. assert buf == [3, 4, 3, 4, 3, 4]
  103. def test_skip_take_1():
  104. ds1 = ds.GeneratorDataset(generator_md, ["data"])
  105. # Here ds1 should be [0, 1, 2, 3]
  106. ds1 = ds1.take(4)
  107. # Here ds1 should be [2, 3]
  108. ds1 = ds1.skip(2)
  109. buf = []
  110. for data in ds1.create_tuple_iterator(output_numpy=True):
  111. buf.append(data[0][0])
  112. assert len(buf) == 2
  113. assert buf == [2, 3]
  114. def test_skip_take_2():
  115. ds1 = ds.GeneratorDataset(generator_md, ["data"])
  116. # Here ds1 should be [2, 3, 4]
  117. ds1 = ds1.skip(2)
  118. # Here ds1 should be [2, 3]
  119. ds1 = ds1.take(2)
  120. buf = []
  121. for data in ds1.create_tuple_iterator(output_numpy=True):
  122. buf.append(data[0][0])
  123. assert len(buf) == 2
  124. assert buf == [2, 3]
  125. def generator_1d():
  126. for i in range(64):
  127. yield (np.array([i]),)
  128. def test_skip_filter_1():
  129. dataset = ds.GeneratorDataset(generator_1d, ['data'])
  130. dataset = dataset.skip(5)
  131. dataset = dataset.filter(predicate=lambda data: data < 11, num_parallel_workers=4)
  132. buf = []
  133. for item in dataset.create_tuple_iterator(output_numpy=True):
  134. buf.append(item[0][0])
  135. assert buf == [5, 6, 7, 8, 9, 10]
  136. def test_skip_filter_2():
  137. dataset = ds.GeneratorDataset(generator_1d, ['data'])
  138. dataset = dataset.filter(predicate=lambda data: data < 11, num_parallel_workers=4)
  139. dataset = dataset.skip(5)
  140. buf = []
  141. for item in dataset.create_tuple_iterator(output_numpy=True):
  142. buf.append(item[0][0])
  143. assert buf == [5, 6, 7, 8, 9, 10]
  144. def test_skip_exception_1():
  145. data1 = ds.GeneratorDataset(generator_md, ["data"])
  146. try:
  147. data1 = data1.skip(count=-1)
  148. num_iter = 0
  149. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  150. num_iter += 1
  151. except ValueError as e:
  152. assert "Input count is not within the required interval" in str(e)
  153. def test_skip_exception_2():
  154. ds1 = ds.GeneratorDataset(generator_md, ["data"])
  155. with pytest.raises(ValueError) as e:
  156. ds1 = ds1.skip(-2)
  157. assert "Input count is not within the required interval" in str(e.value)
  158. if __name__ == "__main__":
  159. test_tf_skip()
  160. test_generator_skip()
  161. test_skip_1()
  162. test_skip_2()
  163. test_skip_repeat_1()
  164. test_skip_repeat_2()
  165. test_skip_repeat_3()
  166. test_skip_take_1()
  167. test_skip_take_2()
  168. test_skip_filter_1()
  169. test_skip_filter_2()
  170. test_skip_exception_1()
  171. test_skip_exception_2()