You can not select more than 25 topics Topics must start with a chinese character,a letter or number, can include dashes ('-') and can be up to 35 characters long.

test_cache_map.py 58 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ==============================================================================
  15. """
  16. Testing cache operator with mappable datasets
  17. """
  18. import os
  19. import pytest
  20. import numpy as np
  21. import mindspore.dataset as ds
  22. import mindspore.dataset.vision.c_transforms as c_vision
  23. from mindspore import log as logger
  24. from util import save_and_check_md5
# Paths to the fixture datasets used by the tests in this file; all are
# relative to the working directory the test suite runs from.
DATA_DIR = "../data/dataset/testImageNetData/train/"
COCO_DATA_DIR = "../data/dataset/testCOCO/train/"
COCO_ANNOTATION_FILE = "../data/dataset/testCOCO/annotations/train.json"
NO_IMAGE_DIR = "../data/dataset/testRandomData/"  # directory with no image files
MNIST_DATA_DIR = "../data/dataset/testMnistData/"
CELEBA_DATA_DIR = "../data/dataset/testCelebAData/"
VOC_DATA_DIR = "../data/dataset/testVOC2012/"
MANIFEST_DATA_FILE = "../data/dataset/testManifestData/test.manifest"
CIFAR10_DATA_DIR = "../data/dataset/testCifar10Data/"
CIFAR100_DATA_DIR = "../data/dataset/testCifar100Data/"
MIND_RECORD_DATA_DIR = "../data/mindrecord/testTwoImageData/twobytes.mindrecord"
# When True, save_and_check_md5 regenerates the golden .npz files instead of
# comparing against them.
GENERATE_GOLDEN = False
  37. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  38. def test_cache_map_basic1():
  39. """
  40. Test mappable leaf with cache op right over the leaf
  41. Repeat
  42. |
  43. Map(decode)
  44. |
  45. Cache
  46. |
  47. ImageFolder
  48. """
  49. logger.info("Test cache map basic 1")
  50. if "SESSION_ID" in os.environ:
  51. session_id = int(os.environ['SESSION_ID'])
  52. else:
  53. session_id = 1
  54. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  55. # This DATA_DIR only has 2 images in it
  56. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  57. decode_op = c_vision.Decode()
  58. ds1 = ds1.map(operations=decode_op, input_columns=["image"])
  59. ds1 = ds1.repeat(4)
  60. filename = "cache_map_01_result.npz"
  61. save_and_check_md5(ds1, filename, generate_golden=GENERATE_GOLDEN)
  62. logger.info("test_cache_map_basic1 Ended.\n")
  63. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  64. def test_cache_map_basic2():
  65. """
  66. Test mappable leaf with the cache op later in the tree above the map(decode)
  67. Repeat
  68. |
  69. Cache
  70. |
  71. Map(decode)
  72. |
  73. ImageFolder
  74. """
  75. logger.info("Test cache map basic 2")
  76. if "SESSION_ID" in os.environ:
  77. session_id = int(os.environ['SESSION_ID'])
  78. else:
  79. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  80. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  81. # This DATA_DIR only has 2 images in it
  82. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  83. decode_op = c_vision.Decode()
  84. ds1 = ds1.map(operations=decode_op, input_columns=["image"], cache=some_cache)
  85. ds1 = ds1.repeat(4)
  86. filename = "cache_map_02_result.npz"
  87. save_and_check_md5(ds1, filename, generate_golden=GENERATE_GOLDEN)
  88. logger.info("test_cache_map_basic2 Ended.\n")
  89. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  90. def test_cache_map_basic3():
  91. """
  92. Test different rows result in core dump
  93. """
  94. logger.info("Test cache basic 3")
  95. if "SESSION_ID" in os.environ:
  96. session_id = int(os.environ['SESSION_ID'])
  97. else:
  98. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  99. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  100. # This DATA_DIR only has 2 images in it
  101. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  102. decode_op = c_vision.Decode()
  103. ds1 = ds1.repeat(4)
  104. ds1 = ds1.map(operations=decode_op, input_columns=["image"])
  105. logger.info("ds1.dataset_size is ", ds1.get_dataset_size())
  106. shape = ds1.output_shapes()
  107. logger.info(shape)
  108. num_iter = 0
  109. for _ in ds1.create_dict_iterator(num_epochs=1):
  110. logger.info("get data from dataset")
  111. num_iter += 1
  112. logger.info("Number of data in ds1: {} ".format(num_iter))
  113. assert num_iter == 8
  114. logger.info('test_cache_basic3 Ended.\n')
  115. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  116. def test_cache_map_basic4():
  117. """
  118. Test Map with non-deterministic TensorOps above cache
  119. repeat
  120. |
  121. Map(decode, randomCrop)
  122. |
  123. Cache
  124. |
  125. ImageFolder
  126. """
  127. logger.info("Test cache basic 4")
  128. if "SESSION_ID" in os.environ:
  129. session_id = int(os.environ['SESSION_ID'])
  130. else:
  131. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  132. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  133. # This DATA_DIR only has 2 images in it
  134. data = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  135. random_crop_op = c_vision.RandomCrop([512, 512], [200, 200, 200, 200])
  136. decode_op = c_vision.Decode()
  137. data = data.map(input_columns=["image"], operations=decode_op)
  138. data = data.map(input_columns=["image"], operations=random_crop_op)
  139. data = data.repeat(4)
  140. num_iter = 0
  141. for _ in data.create_dict_iterator():
  142. num_iter += 1
  143. logger.info("Number of data in ds1: {} ".format(num_iter))
  144. assert num_iter == 8
  145. logger.info('test_cache_basic4 Ended.\n')
  146. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  147. def test_cache_map_basic5():
  148. """
  149. Test cache as root node
  150. cache
  151. |
  152. ImageFolder
  153. """
  154. logger.info("Test cache basic 5")
  155. if "SESSION_ID" in os.environ:
  156. session_id = int(os.environ['SESSION_ID'])
  157. else:
  158. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  159. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  160. # This DATA_DIR only has 2 images in it
  161. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  162. num_iter = 0
  163. for _ in ds1.create_dict_iterator(num_epochs=1):
  164. logger.info("get data from dataset")
  165. num_iter += 1
  166. logger.info("Number of data in ds1: {} ".format(num_iter))
  167. assert num_iter == 2
  168. logger.info('test_cache_basic5 Ended.\n')
  169. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  170. def test_cache_map_failure1():
  171. """
  172. Test nested cache (failure)
  173. Repeat
  174. |
  175. Cache
  176. |
  177. Map(decode)
  178. |
  179. Cache
  180. |
  181. ImageFolder
  182. """
  183. logger.info("Test cache failure 1")
  184. if "SESSION_ID" in os.environ:
  185. session_id = int(os.environ['SESSION_ID'])
  186. else:
  187. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  188. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  189. # This DATA_DIR only has 2 images in it
  190. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  191. decode_op = c_vision.Decode()
  192. ds1 = ds1.map(operations=decode_op, input_columns=["image"], cache=some_cache)
  193. ds1 = ds1.repeat(4)
  194. with pytest.raises(RuntimeError) as e:
  195. num_iter = 0
  196. for _ in ds1.create_dict_iterator(num_epochs=1):
  197. num_iter += 1
  198. assert "Nested cache operations is not supported!" in str(e.value)
  199. assert num_iter == 0
  200. logger.info('test_cache_failure1 Ended.\n')
  201. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  202. def test_cache_map_failure2():
  203. """
  204. Test zip under cache (failure)
  205. repeat
  206. |
  207. Cache
  208. |
  209. Map(decode)
  210. |
  211. Zip
  212. | |
  213. ImageFolder ImageFolder
  214. """
  215. logger.info("Test cache failure 2")
  216. if "SESSION_ID" in os.environ:
  217. session_id = int(os.environ['SESSION_ID'])
  218. else:
  219. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  220. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  221. # This DATA_DIR only has 2 images in it
  222. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  223. ds2 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  224. dsz = ds.zip((ds1, ds2))
  225. decode_op = c_vision.Decode()
  226. dsz = dsz.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  227. dsz = dsz.repeat(4)
  228. with pytest.raises(RuntimeError) as e:
  229. num_iter = 0
  230. for _ in dsz.create_dict_iterator():
  231. num_iter += 1
  232. assert "ZipOp is currently not supported as a descendant operator under a cache" in str(e.value)
  233. assert num_iter == 0
  234. logger.info('test_cache_failure2 Ended.\n')
  235. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  236. def test_cache_map_failure3():
  237. """
  238. Test batch under cache (failure)
  239. repeat
  240. |
  241. Cache
  242. |
  243. Map(resize)
  244. |
  245. Batch
  246. |
  247. ImageFolder
  248. """
  249. logger.info("Test cache failure 3")
  250. if "SESSION_ID" in os.environ:
  251. session_id = int(os.environ['SESSION_ID'])
  252. else:
  253. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  254. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  255. # This DATA_DIR only has 2 images in it
  256. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  257. ds1 = ds1.batch(2)
  258. resize_op = c_vision.Resize((224, 224))
  259. ds1 = ds1.map(input_columns=["image"], operations=resize_op, cache=some_cache)
  260. ds1 = ds1.repeat(4)
  261. with pytest.raises(RuntimeError) as e:
  262. num_iter = 0
  263. for _ in ds1.create_dict_iterator():
  264. num_iter += 1
  265. assert "Unexpected error. Expect positive row id: -1" in str(e.value)
  266. assert num_iter == 0
  267. logger.info('test_cache_failure3 Ended.\n')
  268. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  269. def test_cache_map_failure4():
  270. """
  271. Test filter under cache (failure)
  272. repeat
  273. |
  274. Cache
  275. |
  276. Map(decode)
  277. |
  278. Filter
  279. |
  280. ImageFolder
  281. """
  282. logger.info("Test cache failure 4")
  283. if "SESSION_ID" in os.environ:
  284. session_id = int(os.environ['SESSION_ID'])
  285. else:
  286. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  287. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  288. # This DATA_DIR only has 2 images in it
  289. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  290. ds1 = ds1.filter(predicate=lambda data: data < 11, input_columns=["label"])
  291. decode_op = c_vision.Decode()
  292. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  293. ds1 = ds1.repeat(4)
  294. with pytest.raises(RuntimeError) as e:
  295. num_iter = 0
  296. for _ in ds1.create_dict_iterator():
  297. num_iter += 1
  298. assert "FilterOp is currently not supported as a descendant operator under a cache" in str(e.value)
  299. assert num_iter == 0
  300. logger.info('test_cache_failure4 Ended.\n')
  301. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  302. def test_cache_map_failure5():
  303. """
  304. Test Map with non-deterministic TensorOps under cache (failure)
  305. repeat
  306. |
  307. Cache
  308. |
  309. Map(decode, randomCrop)
  310. |
  311. ImageFolder
  312. """
  313. logger.info("Test cache failure 5")
  314. if "SESSION_ID" in os.environ:
  315. session_id = int(os.environ['SESSION_ID'])
  316. else:
  317. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  318. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  319. # This DATA_DIR only has 2 images in it
  320. data = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  321. random_crop_op = c_vision.RandomCrop([512, 512], [200, 200, 200, 200])
  322. decode_op = c_vision.Decode()
  323. data = data.map(input_columns=["image"], operations=decode_op)
  324. data = data.map(input_columns=["image"], operations=random_crop_op, cache=some_cache)
  325. data = data.repeat(4)
  326. with pytest.raises(RuntimeError) as e:
  327. num_iter = 0
  328. for _ in data.create_dict_iterator():
  329. num_iter += 1
  330. assert "MapOp with non-deterministic TensorOps is currently not supported as a descendant of cache" in str(e.value)
  331. assert num_iter == 0
  332. logger.info('test_cache_failure5 Ended.\n')
  333. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  334. def test_cache_map_failure6():
  335. """
  336. Test no-cache-supporting MindRecord leaf with Map under cache (failure)
  337. repeat
  338. |
  339. Cache
  340. |
  341. Map(resize)
  342. |
  343. MindRecord
  344. """
  345. logger.info("Test cache failure 6")
  346. if "SESSION_ID" in os.environ:
  347. session_id = int(os.environ['SESSION_ID'])
  348. else:
  349. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  350. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  351. columns_list = ["id", "file_name", "label_name", "img_data", "label_data"]
  352. num_readers = 1
  353. # The dataset has 5 records
  354. data = ds.MindDataset(MIND_RECORD_DATA_DIR, columns_list, num_readers)
  355. resize_op = c_vision.Resize((224, 224))
  356. data = data.map(input_columns=["img_data"], operations=resize_op, cache=some_cache)
  357. data = data.repeat(4)
  358. with pytest.raises(RuntimeError) as e:
  359. num_iter = 0
  360. for _ in data.create_dict_iterator():
  361. num_iter += 1
  362. assert "There is currently no support for MindRecordOp under cache" in str(e.value)
  363. assert num_iter == 0
  364. logger.info('test_cache_failure6 Ended.\n')
  365. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  366. def test_cache_map_failure7():
  367. """
  368. Test no-cache-supporting Generator leaf with Map under cache (failure)
  369. repeat
  370. |
  371. Cache
  372. |
  373. Map(lambda x: x)
  374. |
  375. Generator
  376. """
  377. def generator_1d():
  378. for i in range(64):
  379. yield (np.array(i),)
  380. logger.info("Test cache failure 7")
  381. if "SESSION_ID" in os.environ:
  382. session_id = int(os.environ['SESSION_ID'])
  383. else:
  384. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  385. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  386. data = ds.GeneratorDataset(generator_1d, ["data"])
  387. data = data.map((lambda x: x), ["data"], cache=some_cache)
  388. data = data.repeat(4)
  389. with pytest.raises(RuntimeError) as e:
  390. num_iter = 0
  391. for _ in data.create_dict_iterator():
  392. num_iter += 1
  393. assert "There is currently no support for GeneratorOp under cache" in str(e.value)
  394. assert num_iter == 0
  395. logger.info('test_cache_failure7 Ended.\n')
  396. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  397. def test_cache_map_failure8():
  398. """
  399. Test a repeat under mappable cache (failure)
  400. Cache
  401. |
  402. Map(decode)
  403. |
  404. Repeat
  405. |
  406. ImageFolder
  407. """
  408. logger.info("Test cache failure 8")
  409. if "SESSION_ID" in os.environ:
  410. session_id = int(os.environ['SESSION_ID'])
  411. else:
  412. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  413. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  414. # This DATA_DIR only has 2 images in it
  415. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  416. decode_op = c_vision.Decode()
  417. ds1 = ds1.repeat(4)
  418. ds1 = ds1.map(operations=decode_op, input_columns=["image"], cache=some_cache)
  419. with pytest.raises(RuntimeError) as e:
  420. num_iter = 0
  421. for _ in ds1.create_dict_iterator(num_epochs=1):
  422. num_iter += 1
  423. assert "Repeat is not supported as a descendant operator under a mappable cache" in str(e.value)
  424. assert num_iter == 0
  425. logger.info('test_cache_failure8 Ended.\n')
  426. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  427. def test_cache_map_parameter_check():
  428. """
  429. Test illegal parameters for DatasetCache
  430. """
  431. logger.info("Test cache map parameter check")
  432. with pytest.raises(ValueError) as info:
  433. ds.DatasetCache(session_id=-1, size=0, spilling=True)
  434. assert "Input is not within the required interval" in str(info.value)
  435. with pytest.raises(TypeError) as info:
  436. ds.DatasetCache(session_id="1", size=0, spilling=True)
  437. assert "Argument session_id with value 1 is not of type (<class 'int'>,)" in str(info.value)
  438. with pytest.raises(TypeError) as info:
  439. ds.DatasetCache(session_id=None, size=0, spilling=True)
  440. assert "Argument session_id with value None is not of type (<class 'int'>,)" in str(info.value)
  441. with pytest.raises(ValueError) as info:
  442. ds.DatasetCache(session_id=1, size=-1, spilling=True)
  443. assert "Input size must be greater than 0" in str(info.value)
  444. with pytest.raises(TypeError) as info:
  445. ds.DatasetCache(session_id=1, size="1", spilling=True)
  446. assert "Argument size with value 1 is not of type (<class 'int'>,)" in str(info.value)
  447. with pytest.raises(TypeError) as info:
  448. ds.DatasetCache(session_id=1, size=None, spilling=True)
  449. assert "Argument size with value None is not of type (<class 'int'>,)" in str(info.value)
  450. with pytest.raises(TypeError) as info:
  451. ds.DatasetCache(session_id=1, size=0, spilling="illegal")
  452. assert "Argument spilling with value illegal is not of type (<class 'bool'>,)" in str(info.value)
  453. with pytest.raises(TypeError) as err:
  454. ds.DatasetCache(session_id=1, size=0, spilling=True, hostname=50052)
  455. assert "Argument hostname with value 50052 is not of type (<class 'str'>,)" in str(err.value)
  456. with pytest.raises(RuntimeError) as err:
  457. ds.DatasetCache(session_id=1, size=0, spilling=True, hostname="illegal")
  458. assert "Unexpected error. now cache client has to be on the same host with cache server" in str(err.value)
  459. with pytest.raises(RuntimeError) as err:
  460. ds.DatasetCache(session_id=1, size=0, spilling=True, hostname="127.0.0.2")
  461. assert "Unexpected error. now cache client has to be on the same host with cache server" in str(err.value)
  462. with pytest.raises(TypeError) as info:
  463. ds.DatasetCache(session_id=1, size=0, spilling=True, port="illegal")
  464. assert "Argument port with value illegal is not of type (<class 'int'>,)" in str(info.value)
  465. with pytest.raises(TypeError) as info:
  466. ds.DatasetCache(session_id=1, size=0, spilling=True, port="50052")
  467. assert "Argument port with value 50052 is not of type (<class 'int'>,)" in str(info.value)
  468. with pytest.raises(ValueError) as err:
  469. ds.DatasetCache(session_id=1, size=0, spilling=True, port=0)
  470. assert "Input port is not within the required interval of (1025 to 65535)" in str(err.value)
  471. with pytest.raises(ValueError) as err:
  472. ds.DatasetCache(session_id=1, size=0, spilling=True, port=65536)
  473. assert "Input port is not within the required interval of (1025 to 65535)" in str(err.value)
  474. with pytest.raises(TypeError) as err:
  475. ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=True)
  476. assert "Argument cache with value True is not of type" in str(err.value)
  477. logger.info("test_cache_map_parameter_check Ended.\n")
  478. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  479. def test_cache_map_running_twice1():
  480. """
  481. Executing the same pipeline for twice (from python), with cache injected after map
  482. Repeat
  483. |
  484. Cache
  485. |
  486. Map(decode)
  487. |
  488. ImageFolder
  489. """
  490. logger.info("Test cache map running twice 1")
  491. if "SESSION_ID" in os.environ:
  492. session_id = int(os.environ['SESSION_ID'])
  493. else:
  494. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  495. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  496. # This DATA_DIR only has 2 images in it
  497. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  498. decode_op = c_vision.Decode()
  499. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  500. ds1 = ds1.repeat(4)
  501. num_iter = 0
  502. for _ in ds1.create_dict_iterator():
  503. num_iter += 1
  504. logger.info("Number of data in ds1: {} ".format(num_iter))
  505. assert num_iter == 8
  506. num_iter = 0
  507. for _ in ds1.create_dict_iterator():
  508. num_iter += 1
  509. logger.info("Number of data in ds1: {} ".format(num_iter))
  510. assert num_iter == 8
  511. logger.info("test_cache_map_running_twice1 Ended.\n")
  512. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  513. def test_cache_map_running_twice2():
  514. """
  515. Executing the same pipeline for twice (from shell), with cache injected after leaf
  516. Repeat
  517. |
  518. Map(decode)
  519. |
  520. Cache
  521. |
  522. ImageFolder
  523. """
  524. logger.info("Test cache map running twice 2")
  525. if "SESSION_ID" in os.environ:
  526. session_id = int(os.environ['SESSION_ID'])
  527. else:
  528. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  529. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  530. # This DATA_DIR only has 2 images in it
  531. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  532. decode_op = c_vision.Decode()
  533. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  534. ds1 = ds1.repeat(4)
  535. num_iter = 0
  536. for _ in ds1.create_dict_iterator():
  537. num_iter += 1
  538. logger.info("Number of data in ds1: {} ".format(num_iter))
  539. assert num_iter == 8
  540. logger.info("test_cache_map_running_twice2 Ended.\n")
  541. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  542. def test_cache_map_extra_small_size1():
  543. """
  544. Test running pipeline with cache of extra small size and spilling true
  545. Repeat
  546. |
  547. Map(decode)
  548. |
  549. Cache
  550. |
  551. ImageFolder
  552. """
  553. logger.info("Test cache map extra small size 1")
  554. if "SESSION_ID" in os.environ:
  555. session_id = int(os.environ['SESSION_ID'])
  556. else:
  557. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  558. some_cache = ds.DatasetCache(session_id=session_id, size=1, spilling=True)
  559. # This DATA_DIR only has 2 images in it
  560. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  561. decode_op = c_vision.Decode()
  562. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  563. ds1 = ds1.repeat(4)
  564. num_iter = 0
  565. for _ in ds1.create_dict_iterator():
  566. num_iter += 1
  567. logger.info("Number of data in ds1: {} ".format(num_iter))
  568. assert num_iter == 8
  569. logger.info("test_cache_map_extra_small_size1 Ended.\n")
  570. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  571. def test_cache_map_extra_small_size2():
  572. """
  573. Test running pipeline with cache of extra small size and spilling false
  574. Repeat
  575. |
  576. Cache
  577. |
  578. Map(decode)
  579. |
  580. ImageFolder
  581. """
  582. logger.info("Test cache map extra small size 2")
  583. if "SESSION_ID" in os.environ:
  584. session_id = int(os.environ['SESSION_ID'])
  585. else:
  586. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  587. some_cache = ds.DatasetCache(session_id=session_id, size=1, spilling=False)
  588. # This DATA_DIR only has 2 images in it
  589. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  590. decode_op = c_vision.Decode()
  591. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  592. ds1 = ds1.repeat(4)
  593. num_iter = 0
  594. for _ in ds1.create_dict_iterator():
  595. num_iter += 1
  596. logger.info("Number of data in ds1: {} ".format(num_iter))
  597. assert num_iter == 8
  598. logger.info("test_cache_map_extra_small_size2 Ended.\n")
  599. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  600. def test_cache_map_no_image():
  601. """
  602. Test cache with no dataset existing in the path
  603. Repeat
  604. |
  605. Map(decode)
  606. |
  607. Cache
  608. |
  609. ImageFolder
  610. """
  611. logger.info("Test cache map no image")
  612. if "SESSION_ID" in os.environ:
  613. session_id = int(os.environ['SESSION_ID'])
  614. else:
  615. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  616. some_cache = ds.DatasetCache(session_id=session_id, size=1, spilling=False)
  617. # This DATA_DIR only has 2 images in it
  618. ds1 = ds.ImageFolderDataset(dataset_dir=NO_IMAGE_DIR, cache=some_cache)
  619. decode_op = c_vision.Decode()
  620. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  621. ds1 = ds1.repeat(4)
  622. with pytest.raises(RuntimeError):
  623. num_iter = 0
  624. for _ in ds1.create_dict_iterator():
  625. num_iter += 1
  626. assert num_iter == 0
  627. logger.info("test_cache_map_no_image Ended.\n")
  628. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  629. def test_cache_map_parallel_pipeline1(shard):
  630. """
  631. Test running two parallel pipelines (sharing cache) with cache injected after leaf op
  632. Repeat
  633. |
  634. Map(decode)
  635. |
  636. Cache
  637. |
  638. ImageFolder
  639. """
  640. logger.info("Test cache map parallel pipeline 1")
  641. if "SESSION_ID" in os.environ:
  642. session_id = int(os.environ['SESSION_ID'])
  643. else:
  644. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  645. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  646. # This DATA_DIR only has 2 images in it
  647. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, num_shards=2, shard_id=int(shard), cache=some_cache)
  648. decode_op = c_vision.Decode()
  649. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  650. ds1 = ds1.repeat(4)
  651. num_iter = 0
  652. for _ in ds1.create_dict_iterator():
  653. num_iter += 1
  654. logger.info("Number of data in ds1: {} ".format(num_iter))
  655. assert num_iter == 4
  656. logger.info("test_cache_map_parallel_pipeline1 Ended.\n")
  657. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  658. def test_cache_map_parallel_pipeline2(shard):
  659. """
  660. Test running two parallel pipelines (sharing cache) with cache injected after map op
  661. Repeat
  662. |
  663. Cache
  664. |
  665. Map(decode)
  666. |
  667. ImageFolder
  668. """
  669. logger.info("Test cache map parallel pipeline 2")
  670. if "SESSION_ID" in os.environ:
  671. session_id = int(os.environ['SESSION_ID'])
  672. else:
  673. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  674. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  675. # This DATA_DIR only has 2 images in it
  676. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, num_shards=2, shard_id=int(shard))
  677. decode_op = c_vision.Decode()
  678. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  679. ds1 = ds1.repeat(4)
  680. num_iter = 0
  681. for _ in ds1.create_dict_iterator():
  682. num_iter += 1
  683. logger.info("Number of data in ds1: {} ".format(num_iter))
  684. assert num_iter == 4
  685. logger.info("test_cache_map_parallel_pipeline2 Ended.\n")
  686. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  687. def test_cache_map_parallel_workers():
  688. """
  689. Test cache with num_parallel_workers > 1 set for map op and leaf op
  690. Repeat
  691. |
  692. cache
  693. |
  694. Map(decode)
  695. |
  696. ImageFolder
  697. """
  698. logger.info("Test cache map parallel workers")
  699. if "SESSION_ID" in os.environ:
  700. session_id = int(os.environ['SESSION_ID'])
  701. else:
  702. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  703. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  704. # This DATA_DIR only has 2 images in it
  705. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, num_parallel_workers=4)
  706. decode_op = c_vision.Decode()
  707. ds1 = ds1.map(input_columns=["image"], operations=decode_op, num_parallel_workers=4, cache=some_cache)
  708. ds1 = ds1.repeat(4)
  709. num_iter = 0
  710. for _ in ds1.create_dict_iterator():
  711. num_iter += 1
  712. logger.info("Number of data in ds1: {} ".format(num_iter))
  713. assert num_iter == 8
  714. logger.info("test_cache_map_parallel_workers Ended.\n")
  715. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  716. def test_cache_map_server_workers_1():
  717. """
  718. start cache server with --workers 1 and then test cache function
  719. Repeat
  720. |
  721. cache
  722. |
  723. Map(decode)
  724. |
  725. ImageFolder
  726. """
  727. logger.info("Test cache map server workers 1")
  728. if "SESSION_ID" in os.environ:
  729. session_id = int(os.environ['SESSION_ID'])
  730. else:
  731. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  732. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  733. # This DATA_DIR only has 2 images in it
  734. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  735. decode_op = c_vision.Decode()
  736. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  737. ds1 = ds1.repeat(4)
  738. num_iter = 0
  739. for _ in ds1.create_dict_iterator():
  740. num_iter += 1
  741. logger.info("Number of data in ds1: {} ".format(num_iter))
  742. assert num_iter == 8
  743. logger.info("test_cache_map_server_workers_1 Ended.\n")
  744. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  745. def test_cache_map_server_workers_100():
  746. """
  747. start cache server with --workers 100 and then test cache function
  748. Repeat
  749. |
  750. Map(decode)
  751. |
  752. cache
  753. |
  754. ImageFolder
  755. """
  756. logger.info("Test cache map server workers 100")
  757. if "SESSION_ID" in os.environ:
  758. session_id = int(os.environ['SESSION_ID'])
  759. else:
  760. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  761. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  762. # This DATA_DIR only has 2 images in it
  763. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  764. decode_op = c_vision.Decode()
  765. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  766. ds1 = ds1.repeat(4)
  767. num_iter = 0
  768. for _ in ds1.create_dict_iterator():
  769. num_iter += 1
  770. logger.info("Number of data in ds1: {} ".format(num_iter))
  771. assert num_iter == 8
  772. logger.info("test_cache_map_server_workers_100 Ended.\n")
  773. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  774. def test_cache_map_num_connections_1():
  775. """
  776. Test setting num_connections=1 in DatasetCache
  777. Repeat
  778. |
  779. cache
  780. |
  781. Map(decode)
  782. |
  783. ImageFolder
  784. """
  785. logger.info("Test cache map num_connections 1")
  786. if "SESSION_ID" in os.environ:
  787. session_id = int(os.environ['SESSION_ID'])
  788. else:
  789. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  790. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True, num_connections=1)
  791. # This DATA_DIR only has 2 images in it
  792. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  793. decode_op = c_vision.Decode()
  794. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  795. ds1 = ds1.repeat(4)
  796. num_iter = 0
  797. for _ in ds1.create_dict_iterator():
  798. num_iter += 1
  799. logger.info("Number of data in ds1: {} ".format(num_iter))
  800. assert num_iter == 8
  801. logger.info("test_cache_map_num_connections_1 Ended.\n")
  802. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  803. def test_cache_map_num_connections_100():
  804. """
  805. Test setting num_connections=100 in DatasetCache
  806. Repeat
  807. |
  808. Map(decode)
  809. |
  810. cache
  811. |
  812. ImageFolder
  813. """
  814. logger.info("Test cache map num_connections 100")
  815. if "SESSION_ID" in os.environ:
  816. session_id = int(os.environ['SESSION_ID'])
  817. else:
  818. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  819. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True, num_connections=100)
  820. # This DATA_DIR only has 2 images in it
  821. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  822. decode_op = c_vision.Decode()
  823. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  824. ds1 = ds1.repeat(4)
  825. num_iter = 0
  826. for _ in ds1.create_dict_iterator():
  827. num_iter += 1
  828. logger.info("Number of data in ds1: {} ".format(num_iter))
  829. assert num_iter == 8
  830. logger.info("test_cache_map_num_connections_100 Ended.\n")
  831. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  832. def test_cache_map_prefetch_size_1():
  833. """
  834. Test setting prefetch_size=1 in DatasetCache
  835. Repeat
  836. |
  837. cache
  838. |
  839. Map(decode)
  840. |
  841. ImageFolder
  842. """
  843. logger.info("Test cache map prefetch_size 1")
  844. if "SESSION_ID" in os.environ:
  845. session_id = int(os.environ['SESSION_ID'])
  846. else:
  847. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  848. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True, prefetch_size=1)
  849. # This DATA_DIR only has 2 images in it
  850. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  851. decode_op = c_vision.Decode()
  852. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  853. ds1 = ds1.repeat(4)
  854. num_iter = 0
  855. for _ in ds1.create_dict_iterator():
  856. num_iter += 1
  857. logger.info("Number of data in ds1: {} ".format(num_iter))
  858. assert num_iter == 8
  859. logger.info("test_cache_map_prefetch_size_1 Ended.\n")
  860. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  861. def test_cache_map_prefetch_size_100():
  862. """
  863. Test setting prefetch_size=100 in DatasetCache
  864. Repeat
  865. |
  866. Map(decode)
  867. |
  868. cache
  869. |
  870. ImageFolder
  871. """
  872. logger.info("Test cache map prefetch_size 100")
  873. if "SESSION_ID" in os.environ:
  874. session_id = int(os.environ['SESSION_ID'])
  875. else:
  876. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  877. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True, prefetch_size=100)
  878. # This DATA_DIR only has 2 images in it
  879. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  880. decode_op = c_vision.Decode()
  881. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  882. ds1 = ds1.repeat(4)
  883. num_iter = 0
  884. for _ in ds1.create_dict_iterator():
  885. num_iter += 1
  886. logger.info("Number of data in ds1: {} ".format(num_iter))
  887. assert num_iter == 8
  888. logger.info("test_cache_map_prefetch_size_100 Ended.\n")
  889. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  890. def test_cache_map_to_device():
  891. """
  892. Test cache with to_device
  893. DeviceQueue
  894. |
  895. EpochCtrl
  896. |
  897. Repeat
  898. |
  899. Map(decode)
  900. |
  901. cache
  902. |
  903. ImageFolder
  904. """
  905. logger.info("Test cache map to_device")
  906. if "SESSION_ID" in os.environ:
  907. session_id = int(os.environ['SESSION_ID'])
  908. else:
  909. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  910. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  911. # This DATA_DIR only has 2 images in it
  912. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  913. decode_op = c_vision.Decode()
  914. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  915. ds1 = ds1.repeat(4)
  916. ds1 = ds1.to_device()
  917. ds1.send()
  918. logger.info("test_cache_map_to_device Ended.\n")
  919. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  920. def test_cache_map_epoch_ctrl1():
  921. """
  922. Test using two-loops method to run several epochs
  923. Map(decode)
  924. |
  925. cache
  926. |
  927. ImageFolder
  928. """
  929. logger.info("Test cache map epoch ctrl1")
  930. if "SESSION_ID" in os.environ:
  931. session_id = int(os.environ['SESSION_ID'])
  932. else:
  933. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  934. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  935. # This DATA_DIR only has 2 images in it
  936. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  937. decode_op = c_vision.Decode()
  938. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  939. num_epoch = 5
  940. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  941. epoch_count = 0
  942. for _ in range(num_epoch):
  943. row_count = 0
  944. for _ in iter1:
  945. row_count += 1
  946. logger.info("Number of data in ds1: {} ".format(row_count))
  947. assert row_count == 2
  948. epoch_count += 1
  949. assert epoch_count == num_epoch
  950. logger.info("test_cache_map_epoch_ctrl1 Ended.\n")
  951. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  952. def test_cache_map_epoch_ctrl2():
  953. """
  954. Test using two-loops method with infinite epochs
  955. cache
  956. |
  957. Map(decode)
  958. |
  959. ImageFolder
  960. """
  961. logger.info("Test cache map epoch ctrl2")
  962. if "SESSION_ID" in os.environ:
  963. session_id = int(os.environ['SESSION_ID'])
  964. else:
  965. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  966. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  967. # This DATA_DIR only has 2 images in it
  968. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR)
  969. decode_op = c_vision.Decode()
  970. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  971. num_epoch = 5
  972. # iter1 will always assume there is a next epoch and never shutdown
  973. iter1 = ds1.create_dict_iterator()
  974. epoch_count = 0
  975. for _ in range(num_epoch):
  976. row_count = 0
  977. for _ in iter1:
  978. row_count += 1
  979. logger.info("Number of data in ds1: {} ".format(row_count))
  980. assert row_count == 2
  981. epoch_count += 1
  982. assert epoch_count == num_epoch
  983. # manually stop the iterator
  984. iter1.stop()
  985. logger.info("test_cache_map_epoch_ctrl2 Ended.\n")
  986. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  987. def test_cache_map_epoch_ctrl3():
  988. """
  989. Test using two-loops method with infinite epochs over repeat
  990. repeat
  991. |
  992. Map(decode)
  993. |
  994. cache
  995. |
  996. ImageFolder
  997. """
  998. logger.info("Test cache map epoch ctrl3")
  999. if "SESSION_ID" in os.environ:
  1000. session_id = int(os.environ['SESSION_ID'])
  1001. else:
  1002. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1003. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1004. # This DATA_DIR only has 2 images in it
  1005. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  1006. decode_op = c_vision.Decode()
  1007. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  1008. ds1 = ds1.repeat(2)
  1009. num_epoch = 5
  1010. # iter1 will always assume there is a next epoch and never shutdown
  1011. iter1 = ds1.create_dict_iterator()
  1012. epoch_count = 0
  1013. for _ in range(num_epoch):
  1014. row_count = 0
  1015. for _ in iter1:
  1016. row_count += 1
  1017. logger.info("Number of data in ds1: {} ".format(row_count))
  1018. assert row_count == 4
  1019. epoch_count += 1
  1020. assert epoch_count == num_epoch
  1021. # reply on garbage collector to destroy iter1
  1022. logger.info("test_cache_map_epoch_ctrl3 Ended.\n")
  1023. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1024. def test_cache_map_coco1():
  1025. """
  1026. Test mappable coco leaf with cache op right over the leaf
  1027. cache
  1028. |
  1029. Coco
  1030. """
  1031. logger.info("Test cache map coco1")
  1032. if "SESSION_ID" in os.environ:
  1033. session_id = int(os.environ['SESSION_ID'])
  1034. else:
  1035. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1036. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1037. # This dataset has 6 records
  1038. ds1 = ds.CocoDataset(COCO_DATA_DIR, annotation_file=COCO_ANNOTATION_FILE, task="Detection", decode=True,
  1039. cache=some_cache)
  1040. num_epoch = 4
  1041. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1042. epoch_count = 0
  1043. for _ in range(num_epoch):
  1044. assert sum([1 for _ in iter1]) == 6
  1045. epoch_count += 1
  1046. assert epoch_count == num_epoch
  1047. logger.info("test_cache_map_coco1 Ended.\n")
  1048. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1049. def test_cache_map_coco2():
  1050. """
  1051. Test mappable coco leaf with the cache op later in the tree above the map(resize)
  1052. cache
  1053. |
  1054. Map(resize)
  1055. |
  1056. Coco
  1057. """
  1058. logger.info("Test cache map coco2")
  1059. if "SESSION_ID" in os.environ:
  1060. session_id = int(os.environ['SESSION_ID'])
  1061. else:
  1062. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1063. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1064. # This dataset has 6 records
  1065. ds1 = ds.CocoDataset(COCO_DATA_DIR, annotation_file=COCO_ANNOTATION_FILE, task="Detection", decode=True)
  1066. resize_op = c_vision.Resize((224, 224))
  1067. ds1 = ds1.map(input_columns=["image"], operations=resize_op, cache=some_cache)
  1068. num_epoch = 4
  1069. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1070. epoch_count = 0
  1071. for _ in range(num_epoch):
  1072. assert sum([1 for _ in iter1]) == 6
  1073. epoch_count += 1
  1074. assert epoch_count == num_epoch
  1075. logger.info("test_cache_map_coco2 Ended.\n")
  1076. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1077. def test_cache_map_mnist1():
  1078. """
  1079. Test mappable mnist leaf with cache op right over the leaf
  1080. cache
  1081. |
  1082. Mnist
  1083. """
  1084. logger.info("Test cache map mnist1")
  1085. if "SESSION_ID" in os.environ:
  1086. session_id = int(os.environ['SESSION_ID'])
  1087. else:
  1088. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1089. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1090. ds1 = ds.MnistDataset(MNIST_DATA_DIR, num_samples=10, cache=some_cache)
  1091. num_epoch = 4
  1092. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1093. epoch_count = 0
  1094. for _ in range(num_epoch):
  1095. assert sum([1 for _ in iter1]) == 10
  1096. epoch_count += 1
  1097. assert epoch_count == num_epoch
  1098. logger.info("test_cache_map_mnist1 Ended.\n")
  1099. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1100. def test_cache_map_mnist2():
  1101. """
  1102. Test mappable mnist leaf with the cache op later in the tree above the map(resize)
  1103. cache
  1104. |
  1105. Map(resize)
  1106. |
  1107. Mnist
  1108. """
  1109. logger.info("Test cache map mnist2")
  1110. if "SESSION_ID" in os.environ:
  1111. session_id = int(os.environ['SESSION_ID'])
  1112. else:
  1113. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1114. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1115. ds1 = ds.MnistDataset(MNIST_DATA_DIR, num_samples=10)
  1116. resize_op = c_vision.Resize((224, 224))
  1117. ds1 = ds1.map(input_columns=["image"], operations=resize_op, cache=some_cache)
  1118. num_epoch = 4
  1119. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1120. epoch_count = 0
  1121. for _ in range(num_epoch):
  1122. assert sum([1 for _ in iter1]) == 10
  1123. epoch_count += 1
  1124. assert epoch_count == num_epoch
  1125. logger.info("test_cache_map_mnist2 Ended.\n")
  1126. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1127. def test_cache_map_celeba1():
  1128. """
  1129. Test mappable celeba leaf with cache op right over the leaf
  1130. cache
  1131. |
  1132. CelebA
  1133. """
  1134. logger.info("Test cache map celeba1")
  1135. if "SESSION_ID" in os.environ:
  1136. session_id = int(os.environ['SESSION_ID'])
  1137. else:
  1138. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1139. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1140. # This dataset has 4 records
  1141. ds1 = ds.CelebADataset(CELEBA_DATA_DIR, shuffle=False, decode=True, cache=some_cache)
  1142. num_epoch = 4
  1143. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1144. epoch_count = 0
  1145. for _ in range(num_epoch):
  1146. assert sum([1 for _ in iter1]) == 4
  1147. epoch_count += 1
  1148. assert epoch_count == num_epoch
  1149. logger.info("test_cache_map_celeba1 Ended.\n")
  1150. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1151. def test_cache_map_celeba2():
  1152. """
  1153. Test mappable celeba leaf with the cache op later in the tree above the map(resize)
  1154. cache
  1155. |
  1156. Map(resize)
  1157. |
  1158. CelebA
  1159. """
  1160. logger.info("Test cache map celeba2")
  1161. if "SESSION_ID" in os.environ:
  1162. session_id = int(os.environ['SESSION_ID'])
  1163. else:
  1164. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1165. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1166. # This dataset has 4 records
  1167. ds1 = ds.CelebADataset(CELEBA_DATA_DIR, shuffle=False, decode=True)
  1168. resize_op = c_vision.Resize((224, 224))
  1169. ds1 = ds1.map(input_columns=["image"], operations=resize_op, cache=some_cache)
  1170. num_epoch = 4
  1171. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1172. epoch_count = 0
  1173. for _ in range(num_epoch):
  1174. assert sum([1 for _ in iter1]) == 4
  1175. epoch_count += 1
  1176. assert epoch_count == num_epoch
  1177. logger.info("test_cache_map_celeba2 Ended.\n")
  1178. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1179. def test_cache_map_manifest1():
  1180. """
  1181. Test mappable manifest leaf with cache op right over the leaf
  1182. cache
  1183. |
  1184. Manifest
  1185. """
  1186. logger.info("Test cache map manifest1")
  1187. if "SESSION_ID" in os.environ:
  1188. session_id = int(os.environ['SESSION_ID'])
  1189. else:
  1190. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1191. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1192. # This dataset has 4 records
  1193. ds1 = ds.ManifestDataset(MANIFEST_DATA_FILE, decode=True, cache=some_cache)
  1194. num_epoch = 4
  1195. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1196. epoch_count = 0
  1197. for _ in range(num_epoch):
  1198. assert sum([1 for _ in iter1]) == 4
  1199. epoch_count += 1
  1200. assert epoch_count == num_epoch
  1201. logger.info("test_cache_map_manifest1 Ended.\n")
  1202. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1203. def test_cache_map_manifest2():
  1204. """
  1205. Test mappable manifest leaf with the cache op later in the tree above the map(resize)
  1206. cache
  1207. |
  1208. Map(resize)
  1209. |
  1210. Manifest
  1211. """
  1212. logger.info("Test cache map manifest2")
  1213. if "SESSION_ID" in os.environ:
  1214. session_id = int(os.environ['SESSION_ID'])
  1215. else:
  1216. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1217. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1218. # This dataset has 4 records
  1219. ds1 = ds.ManifestDataset(MANIFEST_DATA_FILE, decode=True)
  1220. resize_op = c_vision.Resize((224, 224))
  1221. ds1 = ds1.map(input_columns=["image"], operations=resize_op, cache=some_cache)
  1222. num_epoch = 4
  1223. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1224. epoch_count = 0
  1225. for _ in range(num_epoch):
  1226. assert sum([1 for _ in iter1]) == 4
  1227. epoch_count += 1
  1228. assert epoch_count == num_epoch
  1229. logger.info("test_cache_map_manifest2 Ended.\n")
  1230. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1231. def test_cache_map_cifar1():
  1232. """
  1233. Test mappable cifar10 leaf with cache op right over the leaf
  1234. cache
  1235. |
  1236. Cifar10
  1237. """
  1238. logger.info("Test cache map cifar1")
  1239. if "SESSION_ID" in os.environ:
  1240. session_id = int(os.environ['SESSION_ID'])
  1241. else:
  1242. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1243. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1244. ds1 = ds.Cifar10Dataset(CIFAR10_DATA_DIR, num_samples=10, cache=some_cache)
  1245. num_epoch = 4
  1246. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1247. epoch_count = 0
  1248. for _ in range(num_epoch):
  1249. assert sum([1 for _ in iter1]) == 10
  1250. epoch_count += 1
  1251. assert epoch_count == num_epoch
  1252. logger.info("test_cache_map_cifar1 Ended.\n")
  1253. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1254. def test_cache_map_cifar2():
  1255. """
  1256. Test mappable cifar100 leaf with the cache op later in the tree above the map(resize)
  1257. cache
  1258. |
  1259. Map(resize)
  1260. |
  1261. Cifar100
  1262. """
  1263. logger.info("Test cache map cifar2")
  1264. if "SESSION_ID" in os.environ:
  1265. session_id = int(os.environ['SESSION_ID'])
  1266. else:
  1267. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1268. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1269. ds1 = ds.Cifar100Dataset(CIFAR100_DATA_DIR, num_samples=10)
  1270. resize_op = c_vision.Resize((224, 224))
  1271. ds1 = ds1.map(input_columns=["image"], operations=resize_op, cache=some_cache)
  1272. num_epoch = 4
  1273. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1274. epoch_count = 0
  1275. for _ in range(num_epoch):
  1276. assert sum([1 for _ in iter1]) == 10
  1277. epoch_count += 1
  1278. assert epoch_count == num_epoch
  1279. logger.info("test_cache_map_cifar2 Ended.\n")
  1280. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1281. def test_cache_map_voc1():
  1282. """
  1283. Test mappable voc leaf with cache op right over the leaf
  1284. cache
  1285. |
  1286. VOC
  1287. """
  1288. logger.info("Test cache map voc1")
  1289. if "SESSION_ID" in os.environ:
  1290. session_id = int(os.environ['SESSION_ID'])
  1291. else:
  1292. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1293. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1294. # This dataset has 9 records
  1295. ds1 = ds.VOCDataset(VOC_DATA_DIR, task="Detection", usage="train", shuffle=False, decode=True, cache=some_cache)
  1296. num_epoch = 4
  1297. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1298. epoch_count = 0
  1299. for _ in range(num_epoch):
  1300. assert sum([1 for _ in iter1]) == 9
  1301. epoch_count += 1
  1302. assert epoch_count == num_epoch
  1303. logger.info("test_cache_map_voc1 Ended.\n")
  1304. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1305. def test_cache_map_voc2():
  1306. """
  1307. Test mappable voc leaf with the cache op later in the tree above the map(resize)
  1308. cache
  1309. |
  1310. Map(resize)
  1311. |
  1312. VOC
  1313. """
  1314. logger.info("Test cache map voc2")
  1315. if "SESSION_ID" in os.environ:
  1316. session_id = int(os.environ['SESSION_ID'])
  1317. else:
  1318. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1319. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1320. # This dataset has 9 records
  1321. ds1 = ds.VOCDataset(VOC_DATA_DIR, task="Detection", usage="train", shuffle=False, decode=True)
  1322. resize_op = c_vision.Resize((224, 224))
  1323. ds1 = ds1.map(input_columns=["image"], operations=resize_op, cache=some_cache)
  1324. num_epoch = 4
  1325. iter1 = ds1.create_dict_iterator(num_epochs=num_epoch)
  1326. epoch_count = 0
  1327. for _ in range(num_epoch):
  1328. assert sum([1 for _ in iter1]) == 9
  1329. epoch_count += 1
  1330. assert epoch_count == num_epoch
  1331. logger.info("test_cache_map_voc2 Ended.\n")
  1332. class ReverseSampler(ds.Sampler):
  1333. def __iter__(self):
  1334. for i in range(self.dataset_size - 1, -1, -1):
  1335. yield i
  1336. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1337. def test_cache_map_python_sampler1():
  1338. """
  1339. Test using a python sampler, and cache after leaf
  1340. Repeat
  1341. |
  1342. Map(decode)
  1343. |
  1344. cache
  1345. |
  1346. ImageFolder
  1347. """
  1348. logger.info("Test cache map python sampler1")
  1349. if "SESSION_ID" in os.environ:
  1350. session_id = int(os.environ['SESSION_ID'])
  1351. else:
  1352. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1353. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1354. # This DATA_DIR only has 2 images in it
  1355. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, sampler=ReverseSampler(), cache=some_cache)
  1356. decode_op = c_vision.Decode()
  1357. ds1 = ds1.map(input_columns=["image"], operations=decode_op)
  1358. ds1 = ds1.repeat(4)
  1359. num_iter = 0
  1360. for _ in ds1.create_dict_iterator():
  1361. num_iter += 1
  1362. logger.info("Number of data in ds1: {} ".format(num_iter))
  1363. assert num_iter == 8
  1364. logger.info("test_cache_map_python_sampler1 Ended.\n")
  1365. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1366. def test_cache_map_python_sampler2():
  1367. """
  1368. Test using a python sampler, and cache after map
  1369. Repeat
  1370. |
  1371. cache
  1372. |
  1373. Map(decode)
  1374. |
  1375. ImageFolder
  1376. """
  1377. logger.info("Test cache map python sampler2")
  1378. if "SESSION_ID" in os.environ:
  1379. session_id = int(os.environ['SESSION_ID'])
  1380. else:
  1381. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1382. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1383. # This DATA_DIR only has 2 images in it
  1384. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, sampler=ReverseSampler())
  1385. decode_op = c_vision.Decode()
  1386. ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
  1387. ds1 = ds1.repeat(4)
  1388. num_iter = 0
  1389. for _ in ds1.create_dict_iterator():
  1390. num_iter += 1
  1391. logger.info("Number of data in ds1: {} ".format(num_iter))
  1392. assert num_iter == 8
  1393. logger.info("test_cache_map_python_sampler2 Ended.\n")
  1394. @pytest.mark.skipif(os.environ.get('RUN_CACHE_TEST') != 'TRUE', reason="Require to bring up cache server")
  1395. def test_cache_map_nested_repeat():
  1396. """
  1397. Test cache on pipeline with nested repeat ops
  1398. Repeat
  1399. |
  1400. Map(decode)
  1401. |
  1402. Repeat
  1403. |
  1404. Cache
  1405. |
  1406. ImageFolder
  1407. """
  1408. logger.info("Test cache map nested repeat")
  1409. if "SESSION_ID" in os.environ:
  1410. session_id = int(os.environ['SESSION_ID'])
  1411. else:
  1412. raise RuntimeError("Testcase requires SESSION_ID environment variable")
  1413. some_cache = ds.DatasetCache(session_id=session_id, size=0, spilling=True)
  1414. # This DATA_DIR only has 2 images in it
  1415. ds1 = ds.ImageFolderDataset(dataset_dir=DATA_DIR, cache=some_cache)
  1416. decode_op = c_vision.Decode()
  1417. ds1 = ds1.repeat(4)
  1418. ds1 = ds1.map(operations=decode_op, input_columns=["image"])
  1419. ds1 = ds1.repeat(2)
  1420. num_iter = 0
  1421. for _ in ds1.create_dict_iterator(num_epochs=1):
  1422. logger.info("get data from dataset")
  1423. num_iter += 1
  1424. logger.info("Number of data in ds1: {} ".format(num_iter))
  1425. assert num_iter == 16
  1426. logger.info('test_cache_map_nested_repeat Ended.\n')
  1427. if __name__ == '__main__':
  1428. test_cache_map_basic1()
  1429. test_cache_map_basic2()
  1430. test_cache_map_basic3()
  1431. test_cache_map_basic4()
  1432. test_cache_map_failure1()
  1433. test_cache_map_failure2()
  1434. test_cache_map_failure3()
  1435. test_cache_map_failure4()