You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

test_map_offload.py 18 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382
  1. # Copyright 2021-2022 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ==============================================================================
  15. import numpy as np
  16. import pytest
  17. import mindspore.dataset as ds
  18. import mindspore.common.dtype as mstype
  19. import mindspore.dataset.vision.c_transforms as C
  20. import mindspore.dataset.transforms.c_transforms as C2
  21. DATA_DIR = "../data/dataset/testPK/data"
  22. def test_offload():
  23. """
  24. Feature: test map offload flag.
  25. Description: Input is image dataset.
  26. Expectation: Output should be same with activated or deactivated offload.
  27. """
  28. # Dataset with offload activated.
  29. dataset_0 = ds.ImageFolderDataset(DATA_DIR)
  30. dataset_0 = dataset_0.map(operations=[C.Decode()], input_columns="image")
  31. dataset_0 = dataset_0.map(operations=[C.HWC2CHW()], input_columns="image", offload=True)
  32. dataset_0 = dataset_0.batch(8, drop_remainder=True)
  33. # Dataset with offload not activated.
  34. dataset_1 = ds.ImageFolderDataset(DATA_DIR)
  35. dataset_1 = dataset_1.map(operations=[C.Decode()], input_columns="image")
  36. dataset_1 = dataset_1.map(operations=[C.HWC2CHW()], input_columns="image")
  37. dataset_1 = dataset_1.batch(8, drop_remainder=True)
  38. for (img_0, _), (img_1, _) in zip(dataset_0.create_tuple_iterator(num_epochs=1, output_numpy=True),
  39. dataset_1.create_tuple_iterator(num_epochs=1, output_numpy=True)):
  40. np.testing.assert_array_equal(img_0, img_1)
  41. def test_auto_offload():
  42. """
  43. Feature: Test auto_offload config option.
  44. Description: Input is image dataset.
  45. Expectation: Output should same with auto_offload activated and deactivated.
  46. """
  47. trans = [C.Decode(), C.HWC2CHW()]
  48. # Enable automatic offload
  49. ds.config.set_auto_offload(True)
  50. # Dataset with offload deactivated
  51. dataset_auto_disabled = ds.ImageFolderDataset(DATA_DIR)
  52. dataset_auto_disabled = dataset_auto_disabled.map(operations=trans, input_columns="image", offload=False)
  53. dataset_auto_disabled = dataset_auto_disabled.batch(8, drop_remainder=True)
  54. # Dataset with config.auto_offload activated
  55. dataset_auto_enabled = ds.ImageFolderDataset(DATA_DIR)
  56. dataset_auto_enabled = dataset_auto_enabled.map(operations=trans, input_columns="image")
  57. dataset_auto_enabled = dataset_auto_enabled.batch(8, drop_remainder=True)
  58. for (img_0, _), (img_1, _) in zip(dataset_auto_disabled.create_tuple_iterator(num_epochs=1, output_numpy=True),
  59. dataset_auto_enabled.create_tuple_iterator(num_epochs=1, output_numpy=True)):
  60. np.testing.assert_array_equal(img_0, img_1)
  61. # Need to turn off here or subsequent test cases will fail.
  62. ds.config.set_auto_offload(False)
  63. def test_offload_column_validation():
  64. """
  65. Feature: Test the column validation for offloaded map operations
  66. Description: Input is an image dataset, but the input column is incorrect for the offloaded map operation.
  67. Expectation: Should raise RuntimeError.
  68. """
  69. dataset = ds.ImageFolderDataset(DATA_DIR)
  70. dataset = dataset.map(operations=[C.Decode()], input_columns="image")
  71. # Use invalid input column name
  72. dataset = dataset.map(operations=[C.HWC2CHW()], input_columns="fake_column", offload=True)
  73. dataset = dataset.batch(8, drop_remainder=True)
  74. error_msg = "The following input column(s) for an offloaded map operation do not exist: [\'fake_column\']"
  75. with pytest.raises(RuntimeError) as excinfo:
  76. for (_, _) in dataset.create_tuple_iterator(num_epochs=1, output_numpy=True):
  77. continue
  78. assert str(excinfo.value) == error_msg
  79. def test_offload_multi_column():
  80. """
  81. Feature: Test the offload functionality with datasets with more than 2 columns.
  82. Description: Input is an image dataset, copy the image column and apply map operations to both images.
  83. Expectation: Output should be same with both offload activated and deactivated.
  84. """
  85. def copy_column(x, y):
  86. return x, x, y
  87. dataset = ds.ImageFolderDataset(DATA_DIR)
  88. dataset = dataset.map(operations=copy_column, input_columns=["image", "label"],
  89. output_columns=["image1", "image2", "label"],
  90. column_order=["image1", "image2", "label"])
  91. dataset = dataset.map(operations=[C.Decode()], input_columns="image1")
  92. dataset = dataset.map(operations=[C.HWC2CHW()], input_columns="image1")
  93. dataset = dataset.map(operations=[C.Decode()], input_columns="image2")
  94. dataset = dataset.map(operations=[C.HWC2CHW()], input_columns="image2")
  95. dataset = dataset.batch(8, drop_remainder=True)
  96. dataset_offload = ds.ImageFolderDataset(DATA_DIR)
  97. dataset_offload = dataset_offload.map(operations=copy_column, input_columns=["image", "label"],
  98. output_columns=["image1", "image2", "label"],
  99. column_order=["image1", "image2", "label"])
  100. dataset_offload = dataset_offload.map(operations=[C.Decode()], input_columns="image1")
  101. dataset_offload = dataset_offload.map(operations=[C.HWC2CHW()], input_columns="image1", offload=True)
  102. dataset_offload = dataset_offload.map(operations=[C.Decode()], input_columns="image2")
  103. dataset_offload = dataset_offload.map(operations=[C.HWC2CHW()], input_columns="image2", offload=True)
  104. dataset_offload = dataset_offload.batch(8, drop_remainder=True)
  105. for (img1, img2, _), (img1_offload, img2_offload, _) in \
  106. zip(dataset.create_tuple_iterator(num_epochs=1, output_numpy=True),
  107. dataset_offload.create_tuple_iterator(num_epochs=1, output_numpy=True)):
  108. np.testing.assert_array_equal(img1, img1_offload)
  109. np.testing.assert_array_equal(img2, img2_offload)
  110. def test_offload_column_mapping():
  111. """
  112. Feature: Test the dataset column mapping for offloaded operations
  113. Description: Input is an image dataset, copy the image column, then apply offload to only copied column.
  114. Expectation: The offload model dataset column index value is 1 (second column).
  115. """
  116. def copy_column(x, y):
  117. return x, x, y
  118. dataset = ds.ImageFolderDataset(DATA_DIR)
  119. dataset = dataset.map(operations=copy_column, input_columns=["image", "label"],
  120. output_columns=["image1", "image2", "label"], column_order=["image1", "image2", "label"])
  121. dataset = dataset.map(operations=[C.Decode()], input_columns="image2")
  122. dataset = dataset.map(operations=[C.HWC2CHW()], input_columns="image2", offload=True)
  123. dataset_iterator = dataset.create_tuple_iterator(num_epochs=1, output_numpy=True)
  124. offload_col_idxs = dataset_iterator.offload_model.transform_list[0].col_idxs
  125. # assert there is only one column index in the offload model, and that it is 1 (second column)
  126. np.testing.assert_((len(offload_col_idxs) == 1) and (offload_col_idxs[0] == 1))
  127. def test_offload_concat_dataset_1():
  128. """
  129. Feature: test map offload flag for concatenated dataset.
  130. Description: Input is image dataset.
  131. Expectation: Should raise RuntimeError.
  132. """
  133. # Dataset with offload activated.
  134. dataset_0 = ds.ImageFolderDataset(DATA_DIR)
  135. dataset_0 = dataset_0.map(operations=[C.Decode()], input_columns="image")
  136. dataset_0 = dataset_0.map(operations=[C.HWC2CHW()], input_columns="image", offload=True)
  137. dataset_0 = dataset_0.batch(8, drop_remainder=True)
  138. # Dataset with offload not activated.
  139. dataset_1 = ds.ImageFolderDataset(DATA_DIR)
  140. dataset_1 = dataset_1.map(operations=[C.Decode()], input_columns="image")
  141. dataset_1 = dataset_1.map(operations=[C.HWC2CHW()], input_columns="image")
  142. dataset_1 = dataset_1.batch(8, drop_remainder=True)
  143. dataset_concat = dataset_0 + dataset_1
  144. error_msg = "Offload module currently does not support concatenated or zipped datasets."
  145. with pytest.raises(RuntimeError, match=error_msg):
  146. for (_, _) in dataset_concat.create_tuple_iterator(num_epochs=1, output_numpy=True):
  147. continue
  148. def test_offload_concat_dataset_2():
  149. """
  150. Feature: test map offload flag for concatenated dataset.
  151. Description: Input is image dataset.
  152. Expectation: Should raise RuntimeError.
  153. """
  154. # Dataset with offload activated.
  155. dataset_0 = ds.ImageFolderDataset(DATA_DIR)
  156. dataset_0 = dataset_0.map(operations=[C.Decode()], input_columns="image")
  157. dataset_0 = dataset_0.map(operations=[C.HWC2CHW()], input_columns="image", offload=True)
  158. # Dataset with offload not activated.
  159. dataset_1 = ds.ImageFolderDataset(DATA_DIR)
  160. dataset_1 = dataset_1.map(operations=[C.Decode()], input_columns="image")
  161. dataset_1 = dataset_1.map(operations=[C.HWC2CHW()], input_columns="image")
  162. dataset_concat = dataset_0 + dataset_1
  163. dataset_concat = dataset_concat.batch(8, drop_remainder=True)
  164. error_msg = "Offload module currently does not support concatenated or zipped datasets."
  165. with pytest.raises(RuntimeError, match=error_msg):
  166. for (_, _) in dataset_concat.create_tuple_iterator(num_epochs=1, output_numpy=True):
  167. continue
  168. def test_offload_normalize_op():
  169. """
  170. Feature: test map offload Normalize op.
  171. Description: Input is image dataset.
  172. Expectation: Output should be same with activated or deactivated offload for Normalize op.
  173. """
  174. mean = [0.485 * 255, 0.456 * 255, 0.406 * 255]
  175. std = [0.229 * 255, 0.224 * 255, 0.225 * 255]
  176. # Dataset with offload activated.
  177. dataset_0 = ds.ImageFolderDataset(DATA_DIR)
  178. dataset_0 = dataset_0.map(operations=[C.Decode()], input_columns="image")
  179. dataset_0 = dataset_0.map(operations=[C.Normalize(mean=mean, std=std)], input_columns="image", offload=True)
  180. dataset_0 = dataset_0.map(operations=[C.HWC2CHW()], input_columns="image", offload=True)
  181. dataset_0 = dataset_0.batch(8, drop_remainder=True)
  182. # Dataset with offload not activated.
  183. dataset_1 = ds.ImageFolderDataset(DATA_DIR)
  184. dataset_1 = dataset_1.map(operations=[C.Decode()], input_columns="image")
  185. dataset_1 = dataset_1.map(operations=[C.Normalize(mean=mean, std=std)], input_columns="image")
  186. dataset_1 = dataset_1.map(operations=[C.HWC2CHW()], input_columns="image")
  187. dataset_1 = dataset_1.batch(8, drop_remainder=True)
  188. for (img_0, _), (img_1, _) in zip(dataset_0.create_tuple_iterator(num_epochs=1, output_numpy=True),
  189. dataset_1.create_tuple_iterator(num_epochs=1, output_numpy=True)):
  190. np.testing.assert_almost_equal(img_0, img_1, decimal=6)
  191. def test_offload_rescale_op():
  192. """
  193. Feature: test map offload Rescale op.
  194. Description: Input is image dataset.
  195. Expectation: Output should be same with activated or deactivated offload for Rescale op.
  196. """
  197. rescale = 1.0 / 255.0
  198. shift = 0.0
  199. # Dataset with offload activated.
  200. dataset_0 = ds.ImageFolderDataset(DATA_DIR)
  201. dataset_0 = dataset_0.map(operations=[C.Decode()], input_columns="image")
  202. dataset_0 = dataset_0.map(operations=[C.Rescale(rescale, shift)], input_columns="image", offload=True)
  203. dataset_0 = dataset_0.map(operations=[C.HWC2CHW()], input_columns="image", offload=True)
  204. dataset_0 = dataset_0.batch(8, drop_remainder=True)
  205. # Dataset with offload not activated.
  206. dataset_1 = ds.ImageFolderDataset(DATA_DIR)
  207. dataset_1 = dataset_1.map(operations=[C.Decode()], input_columns="image")
  208. dataset_1 = dataset_1.map(operations=[C.Rescale(rescale, shift)], input_columns="image")
  209. dataset_1 = dataset_1.map(operations=[C.HWC2CHW()], input_columns="image")
  210. dataset_1 = dataset_1.batch(8, drop_remainder=True)
  211. for (img_0, _), (img_1, _) in zip(dataset_0.create_tuple_iterator(num_epochs=1, output_numpy=True),
  212. dataset_1.create_tuple_iterator(num_epochs=1, output_numpy=True)):
  213. np.testing.assert_almost_equal(img_0, img_1, decimal=6)
  214. def test_offload_typecast_op():
  215. """
  216. Feature: test map offload TypeCast op.
  217. Description: Input is image dataset.
  218. Expectation: Output should be the same with activated or deactivated offload for TypeCast op.
  219. """
  220. # Dataset without offload activated.
  221. ds_baseline = ds.ImageFolderDataset(DATA_DIR)
  222. ds_baseline = ds_baseline.map(operations=[C.Decode(), C2.TypeCast(mstype.float32)], input_columns="image")
  223. ds_baseline = ds_baseline.map(operations=[C2.TypeCast(mstype.int32)], input_columns="label")
  224. # Dataset with offload activated.
  225. ds_offload = ds.ImageFolderDataset(DATA_DIR)
  226. ds_offload = ds_offload.map(operations=[C.Decode(), C2.TypeCast(mstype.float32)],
  227. input_columns="image", offload=True)
  228. ds_offload = ds_offload.map(operations=[C2.TypeCast(mstype.int32)], input_columns="label", offload=True)
  229. for (img_0, _), (img_1, _) in zip(ds_baseline.create_tuple_iterator(num_epochs=1, output_numpy=True),
  230. ds_offload.create_tuple_iterator(num_epochs=1, output_numpy=True)):
  231. np.testing.assert_almost_equal(img_0, img_1, decimal=6)
  232. def test_offload_different_column_end_of_pipeline():
  233. """
  234. Feature: Test offload end_of_pipeline check.
  235. Description: Input is image dataset.
  236. Expectation: The image map op gets offloaded even though it comes before the not-offloaded label map op, since
  237. the end_of_pipeline check looks at columns separately.
  238. """
  239. image_trans = [C.Decode(), C.HWC2CHW()]
  240. ds.config.set_auto_offload(True)
  241. dataset_0 = ds.ImageFolderDataset(DATA_DIR)
  242. dataset_0 = dataset_0.map(operations=image_trans, input_columns="image")
  243. dataset_0 = dataset_0.map(operations=[C2.TypeCast(mstype.int32)], input_columns="label", offload=False)
  244. data_iterator = dataset_0.create_tuple_iterator(num_epochs=1, output_numpy=True)
  245. # Assert at least one operation has been offloaded
  246. np.testing.assert_(len(data_iterator.offload_model.transform_list[0].me_ops) > 0)
  247. ds.config.set_auto_offload(False)
  248. def test_offload_not_end_of_pipeline():
  249. """
  250. Feature: Test offload end_of_pipeline check.
  251. Description: Input is image dataset.
  252. Expectation: No operations are offloaded, since the image map op at the end of the pipeline has the
  253. offload flag set to False.
  254. """
  255. dataset_0 = ds.ImageFolderDataset(DATA_DIR)
  256. dataset_0 = dataset_0.map(operations=[C.Decode()], input_columns="image", offload=True)
  257. dataset_0 = dataset_0.map(operations=[C.RandomHorizontalFlip(prob=0.5)], input_columns="image", offload=True)
  258. dataset_0 = dataset_0.map(operations=[C.HWC2CHW()], input_columns="image", offload=False)
  259. dataset_0 = dataset_0.map(operations=[C2.TypeCast(mstype.int32)], input_columns="label", offload=False)
  260. data_iterator = dataset_0.create_tuple_iterator(num_epochs=1, output_numpy=True)
  261. # Assert no operations are set to be offloaded
  262. np.testing.assert_(data_iterator.offload_model is None)
  263. def test_offload_dim_check():
  264. """
  265. Feature: test input has the required number of dimensions for offload operation.
  266. Description: Input is image dataset.
  267. Expectation: Should raise ValueError.
  268. """
  269. # Dataset with offload activated.
  270. dataset = ds.ImageFolderDataset(DATA_DIR)
  271. dataset = dataset.map(operations=[C.Decode()], input_columns="image")
  272. dataset = dataset.map(operations=[C.HWC2CHW()], input_columns="image", offload=True)
  273. error_msg = "For HwcToChw offload operation, the dimension of input should be 4, but got 3."
  274. with pytest.raises(ValueError, match=error_msg):
  275. for (_, _) in dataset.create_tuple_iterator(num_epochs=1, output_numpy=True):
  276. continue
  277. def test_offload_random_sharpness_op():
  278. """
  279. Feature: test map offload RandomSharpness op.
  280. Description: Input is image dataset.
  281. Expectation: Output should be same with activated or deactivated offload for RandomSharpness op.
  282. """
  283. # Dataset with offload activated.
  284. dataset_0 = ds.ImageFolderDataset(DATA_DIR)
  285. dataset_0 = dataset_0.map(operations=[C.Decode()], input_columns="image")
  286. dataset_0 = dataset_0.map(operations=[C.RandomSharpness(degrees=[1.0, 1.0])], input_columns="image", offload=True)
  287. dataset_0 = dataset_0.map(operations=[C.HWC2CHW()], input_columns="image", offload=True)
  288. dataset_0 = dataset_0.batch(8, drop_remainder=True)
  289. # Dataset with offload not activated.
  290. dataset_1 = ds.ImageFolderDataset(DATA_DIR)
  291. dataset_1 = dataset_1.map(operations=[C.Decode()], input_columns="image")
  292. dataset_1 = dataset_1.map(operations=[C.RandomSharpness(degrees=[1.0, 1.0])], input_columns="image")
  293. dataset_1 = dataset_1.map(operations=[C.HWC2CHW()], input_columns="image")
  294. dataset_1 = dataset_1.batch(8, drop_remainder=True)
  295. for (img_0, _), (img_1, _) in zip(dataset_0.create_tuple_iterator(num_epochs=1, output_numpy=True),
  296. dataset_1.create_tuple_iterator(num_epochs=1, output_numpy=True)):
  297. np.testing.assert_almost_equal(img_0, img_1, decimal=6)
  298. if __name__ == "__main__":
  299. test_offload()
  300. test_auto_offload()
  301. test_offload_column_validation()
  302. test_offload_column_mapping()
  303. test_offload_multi_column()
  304. test_offload_concat_dataset_1()
  305. test_offload_concat_dataset_2()
  306. test_offload_normalize_op()
  307. test_offload_rescale_op()
  308. test_offload_typecast_op()
  309. test_offload_different_column_end_of_pipeline()
  310. test_offload_not_end_of_pipeline()
  311. test_offload_dim_check()
  312. test_offload_random_sharpness_op()