test_config.py
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Testing configuration manager
"""
import os
import filecmp
import glob
import numpy as np

import mindspore.dataset as ds
import mindspore.dataset.transforms.vision.c_transforms as c_vision
import mindspore.dataset.transforms.vision.py_transforms as py_vision
from mindspore import log as logger
from util import dataset_equal

DATA_DIR = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"]
SCHEMA_DIR = "../data/dataset/test_tf_file_3_images/datasetSchema.json"
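
# The paths above point to a small 3-image TFRecord file and its schema in the shared test data
# directory. Each test below saves the global config values it touches (num_parallel_workers,
# prefetch_size, seed, monitor_sampling_interval) and restores them at the end, so that settings
# do not leak between tests.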


def test_basic():
    """
    Test basic configuration functions
    """
    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    prefetch_size_original = ds.config.get_prefetch_size()
    seed_original = ds.config.get_seed()
    monitor_sampling_interval_original = ds.config.get_monitor_sampling_interval()

    ds.config.load('../data/dataset/declient.cfg')

    # assert ds.config.get_rows_per_buffer() == 32
    assert ds.config.get_num_parallel_workers() == 4
    # assert ds.config.get_worker_connector_size() == 16
    assert ds.config.get_prefetch_size() == 16
    assert ds.config.get_seed() == 5489
    assert ds.config.get_monitor_sampling_interval() == 15

    # ds.config.set_rows_per_buffer(1)
    ds.config.set_num_parallel_workers(2)
    # ds.config.set_worker_connector_size(3)
    ds.config.set_prefetch_size(4)
    ds.config.set_seed(5)
    ds.config.set_monitor_sampling_interval(45)

    # assert ds.config.get_rows_per_buffer() == 1
    assert ds.config.get_num_parallel_workers() == 2
    # assert ds.config.get_worker_connector_size() == 3
    assert ds.config.get_prefetch_size() == 4
    assert ds.config.get_seed() == 5
    assert ds.config.get_monitor_sampling_interval() == 45

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_prefetch_size(prefetch_size_original)
    ds.config.set_seed(seed_original)
    ds.config.set_monitor_sampling_interval(monitor_sampling_interval_original)
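

# A minimal sketch (illustrative only, not used by the tests in this file): the save/set/restore
# pattern in test_basic() could also be expressed as a context manager so the original value is
# restored even when an assertion fails mid-test. The helper name below is an assumption chosen
# for illustration.
import contextlib


@contextlib.contextmanager
def _temporary_num_parallel_workers(value):
    original = ds.config.get_num_parallel_workers()
    ds.config.set_num_parallel_workers(value)
    try:
        yield
    finally:
        # Restore the saved value even if the body raised
        ds.config.set_num_parallel_workers(original)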


def test_get_seed():
    """
    Get the seed value without explicitly setting a default; expect an int.
    """
    assert isinstance(ds.config.get_seed(), int)


def test_pipeline():
    """
    Test that the configuration pipeline works when parameters are set at different locations in dataset code
    """
    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()

    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, shuffle=False)
    data1 = data1.map(input_columns=["image"], operations=[c_vision.Decode(True)])
    ds.serialize(data1, "testpipeline.json")

    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, num_parallel_workers=num_parallel_workers_original,
                               shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=[c_vision.Decode(True)])
    ds.serialize(data2, "testpipeline2.json")

    # Check whether the generated outputs differ; this assertion currently passes (the files are
    # identical) because num_parallel_workers does not get updated in the serialized output.
    assert filecmp.cmp('testpipeline.json', 'testpipeline2.json')

    # Remove the generated json files
    file_list = glob.glob('*.json')
    for f in file_list:
        try:
            os.remove(f)
        except IOError:
            logger.info("Error while deleting: {}".format(f))

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)


def test_deterministic_run_fail():
    """
    Test RandomCrop with a fixed seed while re-using the same op instance; expected to fail
    """
    logger.info("test_deterministic_run_fail")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    # When we set the seed, all operations within our dataset should be deterministic
    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    # The seed is read when the constructor is called. If this op is re-used, the results will not
    # match between the two datasets: the RandomCrop constructor takes the seed (0) and outputs a
    # deterministic series of numbers, e.g. "a" = [1, 2, 3, 4, 5, 6] <- pretend these are random
    random_crop_op = c_vision.RandomCrop([512, 512], [200, 200, 200, 200])
    decode_op = c_vision.Decode()
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=random_crop_op)

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=decode_op)
    # Re-use the same RandomCrop op, whose seed was set in the constructor
    data2 = data2.map(input_columns=["image"], operations=random_crop_op)

    try:
        dataset_equal(data1, data2, 0)
    except Exception as e:
        # The two datasets split the numbers out of the same sequence "a"
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "Array" in str(e)

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)


def test_seed_undeterministic():
    """
    Test the seed with num_parallel_workers in C; this test is expected to fail some of the time
    """
    logger.info("test_seed_undeterministic")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(3)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    # We get the seed when the constructor is called
    random_crop_op = c_vision.RandomCrop([512, 512], [200, 200, 200, 200])
    decode_op = c_vision.Decode()
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=random_crop_op)

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=decode_op)
    # Since the seed is set in the constructor, the two ops output the same deterministic sequence.
    # Assume the generated random sequence "a" = [1, 2, 3, 4, 5, 6] <- pretend these are random
    random_crop_op2 = c_vision.RandomCrop([512, 512], [200, 200, 200, 200])
    data2 = data2.map(input_columns=["image"], operations=random_crop_op2)

    try:
        dataset_equal(data1, data2, 0)
    except Exception as e:
        # Both datasets use numbers from the generated sequence "a"
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "Array" in str(e)

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)


def test_seed_deterministic():
    """
    Test a deterministic run by setting the seed; only works with num_parallel_workers = 1
    """
    logger.info("test_seed_deterministic")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    # The seed will be read in during the constructor call
    random_crop_op = c_vision.RandomCrop([512, 512], [200, 200, 200, 200])
    decode_op = c_vision.Decode()
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=random_crop_op)

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=decode_op)
    # Since the seed is set in the constructor, the two ops output the same deterministic sequence
    random_crop_op2 = c_vision.RandomCrop([512, 512], [200, 200, 200, 200])
    data2 = data2.map(input_columns=["image"], operations=random_crop_op2)

    dataset_equal(data1, data2, 0)

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)


def test_deterministic_run_distribution():
    """
    Test a deterministic run by setting the seed used in a random distribution
    """
    logger.info("test_deterministic_run_distribution")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    # When we set the seed, all operations within our dataset should be deterministic
    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    random_horizontal_flip_op = c_vision.RandomHorizontalFlip(0.1)
    decode_op = c_vision.Decode()
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=random_horizontal_flip_op)

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=decode_op)
    # Since the seed is set in the constructor, the two ops output the same deterministic sequence
    random_horizontal_flip_op2 = c_vision.RandomHorizontalFlip(0.1)
    data2 = data2.map(input_columns=["image"], operations=random_horizontal_flip_op2)

    dataset_equal(data1, data2, 0)

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)


def test_deterministic_python_seed():
    """
    Test deterministic execution with a seed set for python transforms
    """
    logger.info("test_deterministic_python_seed")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    transforms = [
        py_vision.Decode(),
        py_vision.RandomCrop([512, 512], [200, 200, 200, 200]),
        py_vision.ToTensor(),
    ]
    transform = py_vision.ComposeOp(transforms)
    data1 = data1.map(input_columns=["image"], operations=transform())
    data1_output = []
    # config.set_seed() calls random.seed()
    for data_one in data1.create_dict_iterator(num_epochs=1):
        data1_output.append(data_one["image"])

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=transform())
    # config.set_seed() calls random.seed(); reset the seed for the next dataset iterator
    ds.config.set_seed(0)
    data2_output = []
    for data_two in data2.create_dict_iterator(num_epochs=1):
        data2_output.append(data_two["image"])

    np.testing.assert_equal(data1_output, data2_output)

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)
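

# Note on the next test: with num_parallel_workers > 1 and python_multiprocessing=True, rows are
# presumably dispatched to the parallel pyfunc workers in a nondeterministic order, so the random
# draws applied to each image can differ between the two pipelines even with the same seed. The
# test below therefore treats mismatching outputs as the expected outcome.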


def test_deterministic_python_seed_multi_thread():
    """
    Test deterministic execution with a python seed; this fails with a multi-threaded pyfunc run
    """
    logger.info("test_deterministic_python_seed_multi_thread")

    # Save original configuration values
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    seed_original = ds.config.get_seed()

    ds.config.set_num_parallel_workers(3)
    ds.config.set_seed(0)

    # When we set the seed, all operations within our dataset should be deterministic
    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    transforms = [
        py_vision.Decode(),
        py_vision.RandomCrop([512, 512], [200, 200, 200, 200]),
        py_vision.ToTensor(),
    ]
    transform = py_vision.ComposeOp(transforms)
    data1 = data1.map(input_columns=["image"], operations=transform(), python_multiprocessing=True)
    data1_output = []
    # config.set_seed() calls random.seed()
    for data_one in data1.create_dict_iterator(num_epochs=1):
        data1_output.append(data_one["image"])

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(input_columns=["image"], operations=transform(), python_multiprocessing=True)
    # config.set_seed() calls random.seed()
    ds.config.set_seed(0)
    data2_output = []
    for data_two in data2.create_dict_iterator(num_epochs=1):
        data2_output.append(data_two["image"])

    try:
        np.testing.assert_equal(data1_output, data2_output)
    except Exception as e:
        # Expect the outputs not to match during multi-threaded execution
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "Array" in str(e)

    # Restore original configuration values
    ds.config.set_num_parallel_workers(num_parallel_workers_original)
    ds.config.set_seed(seed_original)


if __name__ == '__main__':
    test_basic()
    test_get_seed()
    test_pipeline()
    test_deterministic_run_fail()
    test_seed_undeterministic()
    test_seed_deterministic()
    test_deterministic_run_distribution()
    test_deterministic_python_seed()
    test_deterministic_python_seed_multi_thread()