You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

test_cache_map.py 40 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ==============================================================================
  15. """
  16. Testing cache operator with mappable datasets
  17. """
  18. import os
  19. import pytest
  20. import mindspore.dataset as ds
  21. import mindspore.dataset.vision.c_transforms as c_vision
  22. from mindspore import log as logger
  23. from util import save_and_check_md5
# ImageFolder dataset used by most cases below; the directory holds exactly 2 images.
DATA_DIR = "../data/dataset/testImageNetData/train/"
# COCO dataset + annotations for the "no cache support under this leaf" failure case.
COCO_DATA_DIR = "../data/dataset/testCOCO/train/"
COCO_ANNOTATION_FILE = "../data/dataset/testCOCO/annotations/train.json"
# Directory containing no images, used to exercise the empty-dataset error path.
NO_IMAGE_DIR = "../data/dataset/testRandomData/"
# Set to True to regenerate the golden .npz md5 files used by save_and_check_md5.
GENERATE_GOLDEN = False
  29. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  30. def test_cache_map_basic1():
  31. """
  32. Test mappable leaf with cache op right over the leaf
  33. Repeat
  34. |
  35. Map(decode)
  36. |
  37. Cache
  38. |
  39. ImageFolder
  40. """
  41. logger.info("Test cache map basic 1")
  42. if "SESSION_ID" in os.environ:
  43. session_id = int(os.environ['SESSION_ID'])
  44. else:
  45. session_id = 1
  46. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  47. # This DATA_DIR only has 2 images in it
  48. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  49. decode_op = c_vision.Decode()
  50. ds1 = ds1.map(operations=decode_op, input_columns=["image"])
  51. ds1 = ds1.repeat(4)
  52. filename = "cache_map_01_result.npz"
  53. save_and_check_md5(ds1, filename, generate_golden=GENERATE_GOLDEN)
  54. logger.info("test_cache_map_basic1 Ended.\n")
  55. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  56. def test_cache_map_basic2():
  57. """
  58. Test mappable leaf with the cache op later in the tree above the map(decode)
  59. Repeat
  60. |
  61. Cache
  62. |
  63. Map(decode)
  64. |
  65. ImageFolder
  66. """
  67. logger.info("Test cache map basic 2")
  68. if "SESSION_ID" in os.environ:
  69. session_id = int(os.environ['SESSION_ID'])
  70. else:
  71. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  72. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  73. # This DATA_DIR only has 2 images in it
  74. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  75. decode_op = c_vision.Decode()
  76. ds1 = ds1.map(operations=decode_op, input_columns=["image"], cache=some_cache)
  77. ds1 = ds1.repeat(4)
  78. filename = "cache_map_02_result.npz"
  79. save_and_check_md5(ds1, filename, generate_golden=GENERATE_GOLDEN)
  80. logger.info("test_cache_map_basic2 Ended.\n")
  81. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  82. def test_cache_map_basic3():
  83. """
  84. Test a repeat under mappable cache
  85. Cache
  86. |
  87. Map(decode)
  88. |
  89. Repeat
  90. |
  91. ImageFolder
  92. """
  93. logger.info("Test cache basic 3")
  94. if "SESSION_ID" in os.environ:
  95. session_id = int(os.environ['SESSION_ID'])
  96. else:
  97. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  98. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  99. # This DATA_DIR only has 2 images in it
  100. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  101. decode_op = c_vision.Decode()
  102. ds1 = ds1.repeat(4)
  103. ds1 = ds1.map(operations=decode_op, input_columns=["image"], cache=some_cache)
  104. logger.info("ds1.dataset_size is ", ds1.get_dataset_size())
  105. num_iter = 0
  106. for _ in ds1.create_dict_iterator(num_epochs=1):
  107. logger.info("get data from dataset")
  108. num_iter += 1
  109. logger.info("Number of data in ds1: {} ".format(num_iter))
  110. assert num_iter == 8
  111. logger.info('test_cache_basic3 Ended.\n')
  112. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  113. def test_cache_map_basic4():
  114. """
  115. Test different rows result in core dump
  116. """
  117. logger.info("Test cache basic 4")
  118. if "SESSION_ID" in os.environ:
  119. session_id = int(os.environ['SESSION_ID'])
  120. else:
  121. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  122. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  123. # This DATA_DIR only has 2 images in it
  124. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  125. decode_op = c_vision.Decode()
  126. ds1 = ds1.repeat(4)
  127. ds1 = ds1.map(operations=decode_op, input_columns=["image"])
  128. logger.info("ds1.dataset_size is ", ds1.get_dataset_size())
  129. shape = ds1.output_shapes()
  130. logger.info(shape)
  131. num_iter = 0
  132. for _ in ds1.create_dict_iterator(num_epochs=1):
  133. logger.info("get data from dataset")
  134. num_iter += 1
  135. logger.info("Number of data in ds1: {} ".format(num_iter))
  136. assert num_iter == 8
  137. logger.info('test_cache_basic4 Ended.\n')
  138. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  139. def test_cache_map_basic5():
  140. """
  141. Test Map with non-deterministic TensorOps above cache
  142. repeat
  143. |
  144. Map(decode, randomCrop)
  145. |
  146. Cache
  147. |
  148. ImageFolder
  149. """
  150. logger.info("Test cache failure 5")
  151. if "SESSION_ID" in os.environ:
  152. session_id = int(os.environ['SESSION_ID'])
  153. else:
  154. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  155. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  156. # This DATA_DIR only has 2 images in it
  157. data = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  158. random_crop_op = c_vision.RandomCrop([512, 512], [200, 200, 200, 200])
  159. decode_op = c_vision.Decode()
  160. data = data.map(input_columns=["image"], operations=decode_op)
  161. data = data.map(input_columns=["image"], operations=random_crop_op)
  162. data = data.repeat(4)
  163. num_iter = 0
  164. for _ in data.create_dict_iterator():
  165. num_iter += 1
  166. logger.info("Number of data in ds1: {} ".format(num_iter))
  167. assert num_iter == 8
  168. logger.info('test_cache_failure5 Ended.\n')
  169. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  170. def test_cache_map_basic6():
  171. """
  172. Test cache as root node
  173. cache
  174. |
  175. ImageFolder
  176. """
  177. logger.info("Test cache basic 6")
  178. if "SESSION_ID" in os.environ:
  179. session_id = int(os.environ['SESSION_ID'])
  180. else:
  181. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  182. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  183. # This DATA_DIR only has 2 images in it
  184. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  185. num_iter = 0
  186. for _ in ds1.create_dict_iterator(num_epochs=1):
  187. logger.info("get data from dataset")
  188. num_iter += 1
  189. logger.info("Number of data in ds1: {} ".format(num_iter))
  190. assert num_iter == 2
  191. logger.info('test_cache_basic6 Ended.\n')
  192. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  193. def test_cache_map_failure1():
  194. """
  195. Test nested cache (failure)
  196. Repeat
  197. |
  198. Cache
  199. |
  200. Map(decode)
  201. |
  202. Cache
  203. |
  204. ImageFolder
  205. """
  206. logger.info("Test cache failure 1")
  207. if "SESSION_ID" in os.environ:
  208. session_id = int(os.environ['SESSION_ID'])
  209. else:
  210. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  211. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  212. # This DATA_DIR only has 2 images in it
  213. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  214. decode_op = c_vision.Decode()
  215. ds1 = ds1.map(operations=decode_op, input_columns=["image"], cache=some_cache)
  216. ds1 = ds1.repeat(4)
  217. with pytest.raises(RuntimeError) as e:
  218. num_iter = 0
  219. for _ in ds1.create_dict_iterator(num_epochs=1):
  220. num_iter += 1
  221. assert "Nested cache operations is not supported!" in str(e.value)
  222. assert num_iter == 0
  223. logger.info('test_cache_failure1 Ended.\n')
  224. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  225. def test_cache_map_failure2():
  226. """
  227. Test zip under cache (failure)
  228. repeat
  229. |
  230. Cache
  231. |
  232. Map(decode)
  233. |
  234. Zip
  235. | |
  236. ImageFolder ImageFolder
  237. """
  238. logger.info("Test cache failure 2")
  239. if "SESSION_ID" in os.environ:
  240. session_id = int(os.environ['SESSION_ID'])
  241. else:
  242. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  243. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  244. # This DATA_DIR only has 2 images in it
  245. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  246. ds2 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  247. dsz = ds.zip((ds1, ds2))
  248. decode_op = c_vision.Decode()
  249. dsz = dsz.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  250. dsz = dsz.repeat(4)
  251. with pytest.raises(RuntimeError) as e:
  252. num_iter = 0
  253. for _ in dsz.create_dict_iterator():
  254. num_iter += 1
  255. assert "ZipOp is currently not supported as a descendant operator under a cache" in str(e.value)
  256. assert num_iter == 0
  257. logger.info('test_cache_failure2 Ended.\n')
  258. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  259. def test_cache_map_failure3():
  260. """
  261. Test batch under cache (failure)
  262. repeat
  263. |
  264. Cache
  265. |
  266. Map(resize)
  267. |
  268. Batch
  269. |
  270. ImageFolder
  271. """
  272. logger.info("Test cache failure 3")
  273. if "SESSION_ID" in os.environ:
  274. session_id = int(os.environ['SESSION_ID'])
  275. else:
  276. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  277. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  278. # This DATA_DIR only has 2 images in it
  279. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  280. ds1 = ds1.batch(2)
  281. resize_op = c_vision.Resize((224, 224))
  282. ds1 = ds1.map(input_columns=["image"], operations=resize_op, cache=some_cache)
  283. ds1 = ds1.repeat(4)
  284. with pytest.raises(RuntimeError) as e:
  285. num_iter = 0
  286. for _ in ds1.create_dict_iterator():
  287. num_iter += 1
  288. assert "Unexpected error. Expect positive row id: -1" in str(e.value)
  289. assert num_iter == 0
  290. logger.info('test_cache_failure3 Ended.\n')
  291. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  292. def test_cache_map_failure4():
  293. """
  294. Test filter under cache (failure)
  295. repeat
  296. |
  297. Cache
  298. |
  299. Map(decode)
  300. |
  301. Filter
  302. |
  303. ImageFolder
  304. """
  305. logger.info("Test cache failure 4")
  306. if "SESSION_ID" in os.environ:
  307. session_id = int(os.environ['SESSION_ID'])
  308. else:
  309. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  310. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  311. # This DATA_DIR only has 2 images in it
  312. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  313. ds1 = ds1.filter(predicate=lambda data: data < 11, input_columns=["label"])
  314. decode_op = c_vision.Decode()
  315. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  316. ds1 = ds1.repeat(4)
  317. with pytest.raises(RuntimeError) as e:
  318. num_iter = 0
  319. for _ in ds1.create_dict_iterator():
  320. num_iter += 1
  321. assert "FilterOp is currently not supported as a descendant operator under a cache" in str(e.value)
  322. assert num_iter == 0
  323. logger.info('test_cache_failure4 Ended.\n')
  324. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  325. def test_cache_map_failure5():
  326. """
  327. Test Map with non-deterministic TensorOps under cache (failure)
  328. repeat
  329. |
  330. Cache
  331. |
  332. Map(decode, randomCrop)
  333. |
  334. ImageFolder
  335. """
  336. logger.info("Test cache failure 5")
  337. if "SESSION_ID" in os.environ:
  338. session_id = int(os.environ['SESSION_ID'])
  339. else:
  340. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  341. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  342. # This DATA_DIR only has 2 images in it
  343. data = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  344. random_crop_op = c_vision.RandomCrop([512, 512], [200, 200, 200, 200])
  345. decode_op = c_vision.Decode()
  346. data = data.map(input_columns=["image"], operations=decode_op)
  347. data = data.map(input_columns=["image"], operations=random_crop_op, cache=some_cache)
  348. data = data.repeat(4)
  349. with pytest.raises(RuntimeError) as e:
  350. num_iter = 0
  351. for _ in data.create_dict_iterator():
  352. num_iter += 1
  353. assert "MapOp with non-deterministic TensorOps is currently not supported as a descendant of cache" in str(e.value)
  354. assert num_iter == 0
  355. logger.info('test_cache_failure5 Ended.\n')
  356. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  357. def test_cache_map_failure6():
  358. """
  359. Test no-cache-supporting leaf ops with Map under cache (failure)
  360. repeat
  361. |
  362. Cache
  363. |
  364. Map(resize)
  365. |
  366. Coco
  367. """
  368. logger.info("Test cache failure 6")
  369. if "SESSION_ID" in os.environ:
  370. session_id = int(os.environ['SESSION_ID'])
  371. else:
  372. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  373. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  374. data = ds.CocoDataset(COCO_DATA_DIR, annotation_file=COCO_ANNOTATION_FILE, task="Detection", decode=True)
  375. resize_op = c_vision.Resize((224, 224))
  376. data = data.map(input_columns=["image"], operations=resize_op, cache=some_cache)
  377. data = data.repeat(4)
  378. with pytest.raises(RuntimeError) as e:
  379. num_iter = 0
  380. for _ in data.create_dict_iterator():
  381. num_iter += 1
  382. assert "There is currently no support for CocoOp under cache" in str(e.value)
  383. assert num_iter == 0
  384. logger.info('test_cache_failure6 Ended.\n')
  385. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  386. def test_cache_map_parameter_check():
  387. """
  388. Test illegal parameters for DatasetCache
  389. """
  390. logger.info("Test cache map parameter check")
  391. with pytest.raises(ValueError) as info:
  392. ds.DatasetCache(session_id=-1, size=0, spilling=True)
  393. assert "Input is not within the required interval" in str(info.value)
  394. with pytest.raises(TypeError) as info:
  395. ds.DatasetCache(session_id="1", size=0, spilling=True)
  396. assert "Argument session_id with value 1 is not of type (<class 'int'>,)" in str(info.value)
  397. with pytest.raises(TypeError) as info:
  398. ds.DatasetCache(session_id=None, size=0, spilling=True)
  399. assert "Argument session_id with value None is not of type (<class 'int'>,)" in str(info.value)
  400. with pytest.raises(ValueError) as info:
  401. ds.DatasetCache(session_id=1, size=-1, spilling=True)
  402. assert "Input is not within the required interval" in str(info.value)
  403. with pytest.raises(TypeError) as info:
  404. ds.DatasetCache(session_id=1, size="1", spilling=True)
  405. assert "Argument size with value 1 is not of type (<class 'int'>,)" in str(info.value)
  406. with pytest.raises(TypeError) as info:
  407. ds.DatasetCache(session_id=1, size=None, spilling=True)
  408. assert "Argument size with value None is not of type (<class 'int'>,)" in str(info.value)
  409. with pytest.raises(TypeError) as info:
  410. ds.DatasetCache(session_id=1, size=0, spilling="illegal")
  411. assert "Argument spilling with value illegal is not of type (<class 'bool'>,)" in str(info.value)
  412. with pytest.raises(RuntimeError) as err:
  413. ds.DatasetCache(session_id=1, size=0, spilling=True, hostname="illegal")
  414. assert "Unexpected error. now cache client has to be on the same host with cache server" in str(err.value)
  415. with pytest.raises(RuntimeError) as err:
  416. ds.DatasetCache(session_id=1, size=0, spilling=True, hostname="127.0.0.2")
  417. assert "Unexpected error. now cache client has to be on the same host with cache server" in str(err.value)
  418. with pytest.raises(TypeError) as info:
  419. ds.DatasetCache(session_id=1, size=0, spilling=True, port="illegal")
  420. assert "incompatible constructor arguments" in str(info.value)
  421. with pytest.raises(TypeError) as info:
  422. ds.DatasetCache(session_id=1, size=0, spilling=True, port="50052")
  423. assert "incompatible constructor arguments" in str(info.value)
  424. with pytest.raises(RuntimeError) as err:
  425. ds.DatasetCache(session_id=1, size=0, spilling=True, port=0)
  426. assert "Unexpected error. port must be positive" in str(err.value)
  427. with pytest.raises(RuntimeError) as err:
  428. ds.DatasetCache(session_id=1, size=0, spilling=True, port=65536)
  429. assert "Unexpected error. illegal port number" in str(err.value)
  430. with pytest.raises(TypeError) as err:
  431. ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=True)
  432. assert "Argument cache with value True is not of type" in str(err.value)
  433. logger.info("test_cache_map_parameter_check Ended.\n")
  434. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  435. def test_cache_map_running_twice1():
  436. """
  437. Executing the same pipeline for twice (from python), with cache injected after map
  438. Repeat
  439. |
  440. Cache
  441. |
  442. Map(decode)
  443. |
  444. ImageFolder
  445. """
  446. logger.info("Test cache map running twice 1")
  447. if "SESSION_ID" in os.environ:
  448. session_id = int(os.environ['SESSION_ID'])
  449. else:
  450. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  451. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  452. # This DATA_DIR only has 2 images in it
  453. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  454. decode_op = c_vision.Decode()
  455. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  456. ds1 = ds1.repeat(4)
  457. num_iter = 0
  458. for _ in ds1.create_dict_iterator():
  459. num_iter += 1
  460. logger.info("Number of data in ds1: {} ".format(num_iter))
  461. assert num_iter == 8
  462. num_iter = 0
  463. for _ in ds1.create_dict_iterator():
  464. num_iter += 1
  465. logger.info("Number of data in ds1: {} ".format(num_iter))
  466. assert num_iter == 8
  467. logger.info("test_cache_map_running_twice1 Ended.\n")
  468. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  469. def test_cache_map_running_twice2():
  470. """
  471. Executing the same pipeline for twice (from shell), with cache injected after leaf
  472. Repeat
  473. |
  474. Map(decode)
  475. |
  476. Cache
  477. |
  478. ImageFolder
  479. """
  480. logger.info("Test cache map running twice 2")
  481. if "SESSION_ID" in os.environ:
  482. session_id = int(os.environ['SESSION_ID'])
  483. else:
  484. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  485. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  486. # This DATA_DIR only has 2 images in it
  487. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  488. decode_op = c_vision.Decode()
  489. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  490. ds1 = ds1.repeat(4)
  491. num_iter = 0
  492. for _ in ds1.create_dict_iterator():
  493. num_iter += 1
  494. logger.info("Number of data in ds1: {} ".format(num_iter))
  495. assert num_iter == 8
  496. logger.info("test_cache_map_running_twice2 Ended.\n")
  497. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  498. def test_cache_map_extra_small_size1():
  499. """
  500. Test running pipeline with cache of extra small size and spilling true
  501. Repeat
  502. |
  503. Map(decode)
  504. |
  505. Cache
  506. |
  507. ImageFolder
  508. """
  509. logger.info("Test cache map extra small size 1")
  510. if "SESSION_ID" in os.environ:
  511. session_id = int(os.environ['SESSION_ID'])
  512. else:
  513. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  514. some_cache = ds.DatasetCache(session_id=session_id, size=1, spilling=True)
  515. # This DATA_DIR only has 2 images in it
  516. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  517. decode_op = c_vision.Decode()
  518. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  519. ds1 = ds1.repeat(4)
  520. num_iter = 0
  521. for _ in ds1.create_dict_iterator():
  522. num_iter += 1
  523. logger.info("Number of data in ds1: {} ".format(num_iter))
  524. assert num_iter == 8
  525. logger.info("test_cache_map_extra_small_size1 Ended.\n")
  526. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  527. def test_cache_map_extra_small_size2():
  528. """
  529. Test running pipeline with cache of extra small size and spilling false
  530. Repeat
  531. |
  532. Cache
  533. |
  534. Map(decode)
  535. |
  536. ImageFolder
  537. """
  538. logger.info("Test cache map extra small size 2")
  539. if "SESSION_ID" in os.environ:
  540. session_id = int(os.environ['SESSION_ID'])
  541. else:
  542. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  543. some_cache = ds.DatasetCache(session_id=session_id, size=1, spilling=False)
  544. # This DATA_DIR only has 2 images in it
  545. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  546. decode_op = c_vision.Decode()
  547. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  548. ds1 = ds1.repeat(4)
  549. num_iter = 0
  550. for _ in ds1.create_dict_iterator():
  551. num_iter += 1
  552. logger.info("Number of data in ds1: {} ".format(num_iter))
  553. assert num_iter == 8
  554. logger.info("test_cache_map_extra_small_size2 Ended.\n")
  555. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  556. def test_cache_map_no_image():
  557. """
  558. Test cache with no dataset existing in the path
  559. Repeat
  560. |
  561. Map(decode)
  562. |
  563. Cache
  564. |
  565. ImageFolder
  566. """
  567. logger.info("Test cache map no image")
  568. if "SESSION_ID" in os.environ:
  569. session_id = int(os.environ['SESSION_ID'])
  570. else:
  571. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  572. some_cache = ds.DatasetCache(session_id=session_id, size=1, spilling=False)
  573. # This DATA_DIR only has 2 images in it
  574. ds1 = ds.ImageFolderDataset(dataset_dir=NO_IMAGE_DIR, cache=some_cache)
  575. decode_op = c_vision.Decode()
  576. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  577. ds1 = ds1.repeat(4)
  578. with pytest.raises(RuntimeError):
  579. num_iter = 0
  580. for _ in ds1.create_dict_iterator():
  581. num_iter += 1
  582. assert num_iter == 0
  583. logger.info("test_cache_map_no_image Ended.\n")
  584. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  585. def test_cache_map_parallel_pipeline1(shard):
  586. """
  587. Test running two parallel pipelines (sharing cache) with cache injected after leaf op
  588. Repeat
  589. |
  590. Map(decode)
  591. |
  592. Cache
  593. |
  594. ImageFolder
  595. """
  596. logger.info("Test cache map parallel pipeline 1")
  597. if "SESSION_ID" in os.environ:
  598. session_id = int(os.environ['SESSION_ID'])
  599. else:
  600. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  601. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  602. # This DATA_DIR only has 2 images in it
  603. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, num_shards=2, shard_id=int(shard), cache=some_cache)
  604. decode_op = c_vision.Decode()
  605. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  606. ds1 = ds1.repeat(4)
  607. num_iter = 0
  608. for _ in ds1.create_dict_iterator():
  609. num_iter += 1
  610. logger.info("Number of data in ds1: {} ".format(num_iter))
  611. assert num_iter == 4
  612. logger.info("test_cache_map_parallel_pipeline1 Ended.\n")
  613. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  614. def test_cache_map_parallel_pipeline2(shard):
  615. """
  616. Test running two parallel pipelines (sharing cache) with cache injected after map op
  617. Repeat
  618. |
  619. Cache
  620. |
  621. Map(decode)
  622. |
  623. ImageFolder
  624. """
  625. logger.info("Test cache map parallel pipeline 2")
  626. if "SESSION_ID" in os.environ:
  627. session_id = int(os.environ['SESSION_ID'])
  628. else:
  629. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  630. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  631. # This DATA_DIR only has 2 images in it
  632. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, num_shards=2, shard_id=int(shard))
  633. decode_op = c_vision.Decode()
  634. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  635. ds1 = ds1.repeat(4)
  636. num_iter = 0
  637. for _ in ds1.create_dict_iterator():
  638. num_iter += 1
  639. logger.info("Number of data in ds1: {} ".format(num_iter))
  640. assert num_iter == 4
  641. logger.info("test_cache_map_parallel_pipeline2 Ended.\n")
  642. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  643. def test_cache_map_parallel_workers():
  644. """
  645. Test cache with num_parallel_workers > 1 set for map op and leaf op
  646. Repeat
  647. |
  648. cache
  649. |
  650. Map(decode)
  651. |
  652. ImageFolder
  653. """
  654. logger.info("Test cache map parallel workers")
  655. if "SESSION_ID" in os.environ:
  656. session_id = int(os.environ['SESSION_ID'])
  657. else:
  658. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  659. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  660. # This DATA_DIR only has 2 images in it
  661. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, num_parallel_workers=4)
  662. decode_op = c_vision.Decode()
  663. ds1 = ds1.map(input_columns=["image"], operations=decode_op, num_parallel_workers=4, cache=some_cache)
  664. ds1 = ds1.repeat(4)
  665. num_iter = 0
  666. for _ in ds1.create_dict_iterator():
  667. num_iter += 1
  668. logger.info("Number of data in ds1: {} ".format(num_iter))
  669. assert num_iter == 8
  670. logger.info("test_cache_map_parallel_workers Ended.\n")
  671. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  672. def test_cache_map_server_workers_1():
  673. """
  674. start cache server with --workers 1 and then test cache function
  675. Repeat
  676. |
  677. cache
  678. |
  679. Map(decode)
  680. |
  681. ImageFolder
  682. """
  683. logger.info("Test cache map server workers 1")
  684. if "SESSION_ID" in os.environ:
  685. session_id = int(os.environ['SESSION_ID'])
  686. else:
  687. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  688. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  689. # This DATA_DIR only has 2 images in it
  690. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  691. decode_op = c_vision.Decode()
  692. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  693. ds1 = ds1.repeat(4)
  694. num_iter = 0
  695. for _ in ds1.create_dict_iterator():
  696. num_iter += 1
  697. logger.info("Number of data in ds1: {} ".format(num_iter))
  698. assert num_iter == 8
  699. logger.info("test_cache_map_server_workers_1 Ended.\n")
  700. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  701. def test_cache_map_server_workers_100():
  702. """
  703. start cache server with --workers 100 and then test cache function
  704. Repeat
  705. |
  706. Map(decode)
  707. |
  708. cache
  709. |
  710. ImageFolder
  711. """
  712. logger.info("Test cache map server workers 100")
  713. if "SESSION_ID" in os.environ:
  714. session_id = int(os.environ['SESSION_ID'])
  715. else:
  716. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  717. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  718. # This DATA_DIR only has 2 images in it
  719. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  720. decode_op = c_vision.Decode()
  721. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  722. ds1 = ds1.repeat(4)
  723. num_iter = 0
  724. for _ in ds1.create_dict_iterator():
  725. num_iter += 1
  726. logger.info("Number of data in ds1: {} ".format(num_iter))
  727. assert num_iter == 8
  728. logger.info("test_cache_map_server_workers_100 Ended.\n")
  729. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  730. def test_cache_map_num_connections_1():
  731. """
  732. Test setting num_connections=1 in DatasetCache
  733. Repeat
  734. |
  735. cache
  736. |
  737. Map(decode)
  738. |
  739. ImageFolder
  740. """
  741. logger.info("Test cache map num_connections 1")
  742. if "SESSION_ID" in os.environ:
  743. session_id = int(os.environ['SESSION_ID'])
  744. else:
  745. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  746. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True, num_connections=1)
  747. # This DATA_DIR only has 2 images in it
  748. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  749. decode_op = c_vision.Decode()
  750. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  751. ds1 = ds1.repeat(4)
  752. num_iter = 0
  753. for _ in ds1.create_dict_iterator():
  754. num_iter += 1
  755. logger.info("Number of data in ds1: {} ".format(num_iter))
  756. assert num_iter == 8
  757. logger.info("test_cache_map_num_connections_1 Ended.\n")
  758. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  759. def test_cache_map_num_connections_100():
  760. """
  761. Test setting num_connections=100 in DatasetCache
  762. Repeat
  763. |
  764. Map(decode)
  765. |
  766. cache
  767. |
  768. ImageFolder
  769. """
  770. logger.info("Test cache map num_connections 100")
  771. if "SESSION_ID" in os.environ:
  772. session_id = int(os.environ['SESSION_ID'])
  773. else:
  774. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  775. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True, num_connections=100)
  776. # This DATA_DIR only has 2 images in it
  777. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  778. decode_op = c_vision.Decode()
  779. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  780. ds1 = ds1.repeat(4)
  781. num_iter = 0
  782. for _ in ds1.create_dict_iterator():
  783. num_iter += 1
  784. logger.info("Number of data in ds1: {} ".format(num_iter))
  785. assert num_iter == 8
  786. logger.info("test_cache_map_num_connections_100 Ended.\n")
  787. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  788. def test_cache_map_prefetch_size_1():
  789. """
  790. Test setting prefetch_size=1 in DatasetCache
  791. Repeat
  792. |
  793. cache
  794. |
  795. Map(decode)
  796. |
  797. ImageFolder
  798. """
  799. logger.info("Test cache map prefetch_size 1")
  800. if "SESSION_ID" in os.environ:
  801. session_id = int(os.environ['SESSION_ID'])
  802. else:
  803. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  804. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True, prefetch_size=1)
  805. # This DATA_DIR only has 2 images in it
  806. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  807. decode_op = c_vision.Decode()
  808. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  809. ds1 = ds1.repeat(4)
  810. num_iter = 0
  811. for _ in ds1.create_dict_iterator():
  812. num_iter += 1
  813. logger.info("Number of data in ds1: {} ".format(num_iter))
  814. assert num_iter == 8
  815. logger.info("test_cache_map_prefetch_size_1 Ended.\n")
  816. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  817. def test_cache_map_prefetch_size_100():
  818. """
  819. Test setting prefetch_size=100 in DatasetCache
  820. Repeat
  821. |
  822. Map(decode)
  823. |
  824. cache
  825. |
  826. ImageFolder
  827. """
  828. logger.info("Test cache map prefetch_size 100")
  829. if "SESSION_ID" in os.environ:
  830. session_id = int(os.environ['SESSION_ID'])
  831. else:
  832. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  833. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True, prefetch_size=100)
  834. # This DATA_DIR only has 2 images in it
  835. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  836. decode_op = c_vision.Decode()
  837. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  838. ds1 = ds1.repeat(4)
  839. num_iter = 0
  840. for _ in ds1.create_dict_iterator():
  841. num_iter += 1
  842. logger.info("Number of data in ds1: {} ".format(num_iter))
  843. assert num_iter == 8
  844. logger.info("test_cache_map_prefetch_size_100 Ended.\n")
  845. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  846. def test_cache_map_to_device():
  847. """
  848. Test cache with to_device
  849. DeviceQueue
  850. |
  851. EpochCtrl
  852. |
  853. Repeat
  854. |
  855. Map(decode)
  856. |
  857. cache
  858. |
  859. ImageFolder
  860. """
  861. logger.info("Test cache map to_device")
  862. if "SESSION_ID" in os.environ:
  863. session_id = int(os.environ['SESSION_ID'])
  864. else:
  865. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  866. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  867. # This DATA_DIR only has 2 images in it
  868. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  869. decode_op = c_vision.Decode()
  870. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  871. ds1 = ds1.repeat(4)
  872. ds1 = ds1.to_device()
  873. ds1.send()
  874. logger.info("test_cache_map_to_device Ended.\n")
  875. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  876. def test_cache_map_epoch_ctrl1():
  877. """
  878. Test using two-loops method to run several epochs
  879. Map(decode)
  880. |
  881. cache
  882. |
  883. ImageFolder
  884. """
  885. logger.info("Test cache map epoch ctrl1")
  886. if "SESSION_ID" in os.environ:
  887. session_id = int(os.environ['SESSION_ID'])
  888. else:
  889. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  890. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  891. # This DATA_DIR only has 2 images in it
  892. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  893. decode_op = c_vision.Decode()
  894. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  895. num_epoch = 5
  896. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  897. epoch_count = 0
  898. for _ in range(num_epoch):
  899. row_count = 0
  900. for _ in iter1:
  901. row_count += 1
  902. logger.info("Number of data in ds1: {} ".format(row_count))
  903. assert row_count == 2
  904. epoch_count += 1
  905. assert epoch_count == num_epoch
  906. logger.info("test_cache_map_epoch_ctrl1 Ended.\n")
  907. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  908. def test_cache_map_epoch_ctrl2():
  909. """
  910. Test using two-loops method with infinite epochs
  911. cache
  912. |
  913. Map(decode)
  914. |
  915. ImageFolder
  916. """
  917. logger.info("Test cache map epoch ctrl2")
  918. if "SESSION_ID" in os.environ:
  919. session_id = int(os.environ['SESSION_ID'])
  920. else:
  921. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  922. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  923. # This DATA_DIR only has 2 images in it
  924. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  925. decode_op = c_vision.Decode()
  926. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  927. num_epoch = 5
  928. # iter1 will always assume there is a next epoch and never shutdown
  929. iter1 = ds1.create_dict_iterator()
  930. epoch_count = 0
  931. for _ in range(num_epoch):
  932. row_count = 0
  933. for _ in iter1:
  934. row_count += 1
  935. logger.info("Number of data in ds1: {} ".format(row_count))
  936. assert row_count == 2
  937. epoch_count += 1
  938. assert epoch_count == num_epoch
  939. # manually stop the iterator
  940. iter1.stop()
  941. logger.info("test_cache_map_epoch_ctrl2 Ended.\n")
  942. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  943. def test_cache_map_epoch_ctrl3():
  944. """
  945. Test using two-loops method with infinite epochs over repeat
  946. repeat
  947. |
  948. Map(decode)
  949. |
  950. cache
  951. |
  952. ImageFolder
  953. """
  954. logger.info("Test cache map epoch ctrl3")
  955. if "SESSION_ID" in os.environ:
  956. session_id = int(os.environ['SESSION_ID'])
  957. else:
  958. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  959. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  960. # This DATA_DIR only has 2 images in it
  961. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  962. decode_op = c_vision.Decode()
  963. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  964. ds1 = ds1.repeat(2)
  965. num_epoch = 5
  966. # iter1 will always assume there is a next epoch and never shutdown
  967. iter1 = ds1.create_dict_iterator()
  968. epoch_count = 0
  969. for _ in range(num_epoch):
  970. row_count = 0
  971. for _ in iter1:
  972. row_count += 1
  973. logger.info("Number of data in ds1: {} ".format(row_count))
  974. assert row_count == 4
  975. epoch_count += 1
  976. assert epoch_count == num_epoch
  977. # reply on garbage collector to destroy iter1
  978. logger.info("test_cache_map_epoch_ctrl3 Ended.\n")
  979. if __name__ == '__main__':
  980. test_cache_map_basic1()
  981. test_cache_map_basic2()
  982. test_cache_map_basic3()
  983. test_cache_map_basic4()
  984. test_cache_map_failure1()
  985. test_cache_map_failure2()
  986. test_cache_map_failure3()
  987. test_cache_map_failure4()