
test_config.py

# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Testing configuration manager
"""
import os
import filecmp
import glob
import numpy as np
import mindspore.dataset as ds
import mindspore.dataset.transforms.vision.c_transforms as vision
import mindspore.dataset.transforms.vision.py_transforms as py_vision
from mindspore import log as logger

DATA_DIR = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"]
SCHEMA_DIR = "../data/dataset/test_tf_file_3_images/datasetSchema.json"


def test_basic():
    """
    Test basic configuration functions
    """
    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    prefetch_size_original = ds.config.get_prefetch_size()
    seed_original = ds.config.get_seed()

    ds.config.load('../data/dataset/declient.cfg')

    # assert ds.config.get_rows_per_buffer() == 32
    assert ds.config.get_num_parallel_workers() == 4
    # assert ds.config.get_worker_connector_size() == 16
    assert ds.config.get_prefetch_size() == 16
    assert ds.config.get_seed() == 5489

    # ds.config.set_rows_per_buffer(1)
    ds.config.set_num_parallel_workers(2)
    # ds.config.set_worker_connector_size(3)
    ds.config.set_prefetch_size(4)
    ds.config.set_seed(5)

    # assert ds.config.get_rows_per_buffer() == 1
    assert ds.config.get_num_parallel_workers() == 2
    # assert ds.config.get_worker_connector_size() == 3
    assert ds.config.get_prefetch_size() == 4
    assert ds.config.get_seed() == 5

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_prefetch_size(prefetch_size_original)
    ds.config.set_seed(seed_original)
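

# The save/modify/restore pattern above recurs in every test below. A minimal
# sketch of how it could be factored into a context manager (an illustrative
# addition, not part of the original suite; it uses only the ds.config
# getters/setters already exercised in this file):
import contextlib


@contextlib.contextmanager
def preserved_config():
    """Save num_parallel_workers, prefetch_size and seed; restore them on exit."""
    workers = ds.config.get_num_parallel_workers()
    prefetch = ds.config.get_prefetch_size()
    seed = ds.config.get_seed()
    try:
        yield
    finally:
        ds.config.set_num_parallel_workers(workers)
        ds.config.set_prefetch_size(prefetch)
        ds.config.set_seed(seed)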


def test_get_seed():
    """
    Get the seed value without explicitly setting a default; expect an int.
    """
    assert isinstance(ds.config.get_seed(), int)


def test_pipeline():
    """
    Test that the configuration pipeline works when parameters are set at
    different points in the dataset code.
    """
    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()

    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, shuffle=False)
    ds.config.set_num_parallel_workers(2)
    data1 = data1.map(input_columns=["image"], operations=[vision.Decode(True)])
    ds.serialize(data1, "testpipeline.json")

    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, shuffle=False)
    ds.config.set_num_parallel_workers(4)
    data2 = data2.map(input_columns=["image"], operations=[vision.Decode(True)])
    ds.serialize(data2, "testpipeline2.json")

    # The two pipelines were built with different num_parallel_workers settings,
    # yet the serialized files compare equal; this passes currently only because
    # num_parallel_workers does not get updated in the serialized output.
    assert filecmp.cmp('testpipeline.json', 'testpipeline2.json')

    # Remove the generated JSON files
    file_list = glob.glob('*.json')
    for f in file_list:
        try:
            os.remove(f)
        except IOError:
            logger.info("Error while deleting: {}".format(f))

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)


def test_deterministic_run_fail():
    """
    Test RandomCrop with a fixed seed and a shared op; expected to fail.
    """
    logger.info("test_deterministic_run_fail")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    # When the seed is set, all operations within the dataset should be deterministic
    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    # The seed is read once, when the constructor is called. If this op is re-used,
    # the results will not match between the two datasets: the RandomCrop constructor
    # takes seed (0) and outputs a deterministic series of numbers,
    # e.g. "a" = [1, 2, 3, 4, 5, 6] <- pretend these are random
    random_crop_op = vision.RandomCrop([512, 512], [200, 200, 200, 200])
    decode_op = vision.Decode()
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=random_crop_op)

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=decode_op)
    # Re-use the same op, whose seed was fixed in its constructor
    data2 = data2.map(input_columns=["image"], operations=random_crop_op)

    try:
        for item1, item2 in zip(data1.create_dict_iterator(), data2.create_dict_iterator()):
            np.testing.assert_equal(item1["image"], item2["image"])
    except Exception as e:
        # The two datasets split the numbers of the shared sequence "a" between them
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "Array" in str(e)

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)
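

# Why sharing one random op across two pipelines breaks determinism: both
# pipelines draw from the op's single random stream, so their draws interleave.
# A minimal standard-library sketch of the same effect (illustrative only, not
# MindSpore internals):
import random


def _interleaved_rng_demo():
    shared = random.Random(0)                        # one generator, two consumers
    first = [shared.random() for _ in range(3)]      # "pipeline 1" takes 3 draws
    second = [shared.random() for _ in range(3)]     # "pipeline 2" gets the next 3
    assert first != second                           # same seed, different outputs

    gen1, gen2 = random.Random(0), random.Random(0)  # separate, identically seeded
    assert [gen1.random() for _ in range(3)] == [gen2.random() for _ in range(3)]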


def test_deterministic_run_pass():
    """
    Test deterministic run with the seed set.
    """
    logger.info("test_deterministic_run_pass")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    # The seed is read when the constructor is called
    random_crop_op = vision.RandomCrop([512, 512], [200, 200, 200, 200])
    decode_op = vision.Decode()
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=random_crop_op)

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=decode_op)
    # Since the seed is fixed in each constructor, the two ops output the same
    # deterministic sequence. Assume the generated random sequence
    # "a" = [1, 2, 3, 4, 5, 6] <- pretend these are random
    random_crop_op2 = vision.RandomCrop([512, 512], [200, 200, 200, 200])
    data2 = data2.map(input_columns=["image"], operations=random_crop_op2)

    try:
        for item1, item2 in zip(data1.create_dict_iterator(), data2.create_dict_iterator()):
            np.testing.assert_equal(item1["image"], item2["image"])
    except Exception as e:
        # Both datasets draw the full generated sequence "a" independently
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "Array" in str(e)

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)


def test_seed_undeterministic():
    """
    Test the seed together with num_parallel_workers in C ops; this test is
    expected to fail some of the time.
    """
    logger.info("test_seed_undeterministic")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    # The seed is read in during the constructor call
    random_crop_op = vision.RandomCrop([512, 512], [200, 200, 200, 200])
    decode_op = vision.Decode()
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=random_crop_op)

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=decode_op)
    # Since the seed is fixed in each constructor, the two ops should output the
    # same deterministic sequence
    random_crop_op2 = vision.RandomCrop([512, 512], [200, 200, 200, 200])
    data2 = data2.map(input_columns=["image"], operations=random_crop_op2)

    for item1, item2 in zip(data1.create_dict_iterator(), data2.create_dict_iterator()):
        np.testing.assert_equal(item1["image"], item2["image"])

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)


def test_deterministic_run_distribution():
    """
    Test deterministic run with the seed set, using a distribution-based op.
    """
    logger.info("test_deterministic_run_distribution")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    # When the seed is set, all operations within the dataset should be deterministic
    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    random_flip_op = vision.RandomHorizontalFlip(0.1)
    decode_op = vision.Decode()
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=random_flip_op)

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=decode_op)
    # Since the seed is fixed in each constructor, the two ops output the same
    # deterministic sequence
    random_flip_op2 = vision.RandomHorizontalFlip(0.1)
    data2 = data2.map(input_columns=["image"], operations=random_flip_op2)

    for item1, item2 in zip(data1.create_dict_iterator(), data2.create_dict_iterator()):
        np.testing.assert_equal(item1["image"], item2["image"])

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)


def test_deterministic_python_seed():
    """
    Test deterministic execution with the seed in Python transforms.
    """
    logger.info("test_deterministic_python_seed")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    transforms = [
        py_vision.Decode(),
        py_vision.RandomCrop([512, 512], [200, 200, 200, 200]),
        py_vision.ToTensor(),
    ]
    transform = py_vision.ComposeOp(transforms)
    data1 = data1.map(input_columns=["image"], operations=transform())
    data1_output = []
    # config.set_seed() calls random.seed()
    for data_one in data1.create_dict_iterator():
        data1_output.append(data_one["image"])

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=transform())
    # config.set_seed() calls random.seed(); reset the seed for the next dataset iterator
    ds.config.set_seed(0)
    data2_output = []
    for data_two in data2.create_dict_iterator():
        data2_output.append(data_two["image"])

    np.testing.assert_equal(data1_output, data2_output)

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)
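

# The reset in the test above works because Python transforms draw from
# Python's global random state, which ds.config.set_seed() re-seeds via
# random.seed() (as the comments above note). A minimal standard-library
# sketch of that re-seeding idea (illustrative only):
import random


def _reseed_demo():
    random.seed(0)
    first_pass = [random.random() for _ in range(3)]
    random.seed(0)                                   # reset before the second pass
    second_pass = [random.random() for _ in range(3)]
    assert first_pass == second_pass                 # identical sequences after re-seed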


def test_deterministic_python_seed_multi_thread():
    """
    Test deterministic execution with the seed in Python transforms; this is
    expected to fail when pyfuncs run in parallel (python_multiprocessing=True).
    """
    logger.info("test_deterministic_python_seed_multi_thread")

    # Save original configuration values
    seed_original = ds.config.get_seed()
    ds.config.set_seed(0)

    # When the seed is set, all operations within the dataset should be deterministic
    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    transforms = [
        py_vision.Decode(),
        py_vision.RandomCrop([512, 512], [200, 200, 200, 200]),
        py_vision.ToTensor(),
    ]
    transform = py_vision.ComposeOp(transforms)
    data1 = data1.map(input_columns=["image"], operations=transform(), python_multiprocessing=True)
    data1_output = []
    # config.set_seed() calls random.seed()
    for data_one in data1.create_dict_iterator():
        data1_output.append(data_one["image"])

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=transform(), python_multiprocessing=True)
    # config.set_seed() calls random.seed()
    ds.config.set_seed(0)
    data2_output = []
    for data_two in data2.create_dict_iterator():
        data2_output.append(data_two["image"])

    try:
        np.testing.assert_equal(data1_output, data2_output)
    except Exception as e:
        # The outputs are expected not to match when pyfuncs run in parallel
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "Array" in str(e)

    # Restore original configuration values
    ds.config.set_seed(seed_original)


if __name__ == '__main__':
    test_basic()
    test_pipeline()
    test_deterministic_run_pass()
    test_deterministic_run_distribution()
    test_deterministic_run_fail()
    test_deterministic_python_seed()
    test_seed_undeterministic()
    test_get_seed()