You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

test_datasets_coco.py 26 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554
# Copyright 2020-2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
  15. import numpy as np
  16. import mindspore.dataset as ds
  17. import mindspore.dataset.text as text
  18. import mindspore.dataset.vision.c_transforms as vision
  19. DATA_DIR = "../data/dataset/testCOCO/train/"
  20. DATA_DIR_2 = "../data/dataset/testCOCO/train"
  21. ANNOTATION_FILE = "../data/dataset/testCOCO/annotations/train.json"
  22. KEYPOINT_FILE = "../data/dataset/testCOCO/annotations/key_point.json"
  23. PANOPTIC_FILE = "../data/dataset/testCOCO/annotations/panoptic.json"
  24. CAPTIONS_FILE = "../data/dataset/testCOCO/annotations/captions.json"
  25. INVALID_FILE = "../data/dataset/testCOCO/annotations/invalid.json"
  26. LACKOFIMAGE_FILE = "../data/dataset/testCOCO/annotations/lack_of_images.json"
  27. INVALID_CATEGORY_ID_FILE = "../data/dataset/testCOCO/annotations/invalid_category_id.json"
  28. def test_coco_detection():
  29. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection",
  30. decode=True, shuffle=False, extra_metadata=True)
  31. data1 = data1.rename("_meta-filename", "filename")
  32. num_iter = 0
  33. file_name = []
  34. image_shape = []
  35. bbox = []
  36. category_id = []
  37. for data in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  38. file_name.append(text.to_str(data["filename"]))
  39. image_shape.append(data["image"].shape)
  40. bbox.append(data["bbox"])
  41. category_id.append(data["category_id"])
  42. num_iter += 1
  43. assert num_iter == 6
  44. assert file_name == ["000000391895", "000000318219", "000000554625", "000000574769",
  45. "000000060623", "000000309022"]
  46. assert image_shape[0] == (2268, 4032, 3)
  47. assert image_shape[1] == (561, 595, 3)
  48. assert image_shape[2] == (607, 585, 3)
  49. assert image_shape[3] == (642, 675, 3)
  50. assert image_shape[4] == (2268, 4032, 3)
  51. assert image_shape[5] == (2268, 4032, 3)
  52. np.testing.assert_array_equal(np.array([[10., 10., 10., 10.], [70., 70., 70., 70.]]), bbox[0])
  53. np.testing.assert_array_equal(np.array([[20., 20., 20., 20.], [80., 80., 80.0, 80.]]), bbox[1])
  54. np.testing.assert_array_equal(np.array([[30.0, 30.0, 30.0, 30.]]), bbox[2])
  55. np.testing.assert_array_equal(np.array([[40., 40., 40., 40.]]), bbox[3])
  56. np.testing.assert_array_equal(np.array([[50., 50., 50., 50.]]), bbox[4])
  57. np.testing.assert_array_equal(np.array([[60., 60., 60., 60.]]), bbox[5])
  58. np.testing.assert_array_equal(np.array([[1], [7]]), category_id[0])
  59. np.testing.assert_array_equal(np.array([[2], [8]]), category_id[1])
  60. np.testing.assert_array_equal(np.array([[3]]), category_id[2])
  61. np.testing.assert_array_equal(np.array([[4]]), category_id[3])
  62. np.testing.assert_array_equal(np.array([[5]]), category_id[4])
  63. np.testing.assert_array_equal(np.array([[6]]), category_id[5])
  64. def test_coco_stuff():
  65. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff",
  66. decode=True, shuffle=False, extra_metadata=True)
  67. data1 = data1.rename("_meta-filename", "filename")
  68. num_iter = 0
  69. file_name = []
  70. image_shape = []
  71. segmentation = []
  72. iscrowd = []
  73. for data in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  74. file_name.append(text.to_str(data["filename"]))
  75. image_shape.append(data["image"].shape)
  76. segmentation.append(data["segmentation"])
  77. iscrowd.append(data["iscrowd"])
  78. num_iter += 1
  79. assert num_iter == 6
  80. assert file_name == ["000000391895", "000000318219", "000000554625", "000000574769",
  81. "000000060623", "000000309022"]
  82. assert image_shape[0] == (2268, 4032, 3)
  83. assert image_shape[1] == (561, 595, 3)
  84. assert image_shape[2] == (607, 585, 3)
  85. assert image_shape[3] == (642, 675, 3)
  86. assert image_shape[4] == (2268, 4032, 3)
  87. assert image_shape[5] == (2268, 4032, 3)
  88. np.testing.assert_array_equal(np.array([[10., 12., 13., 14., 15., 16., 17., 18., 19., 20.],
  89. [70., 72., 73., 74., 75., -1., -1., -1., -1., -1.]]),
  90. segmentation[0])
  91. np.testing.assert_array_equal(np.array([[0], [0]]), iscrowd[0])
  92. np.testing.assert_array_equal(np.array([[20.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0],
  93. [10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, -1.0]]),
  94. segmentation[1])
  95. np.testing.assert_array_equal(np.array([[0], [1]]), iscrowd[1])
  96. np.testing.assert_array_equal(np.array([[40., 42., 43., 44., 45., 46., 47., 48., 49., 40., 41., 42.]]),
  97. segmentation[2])
  98. np.testing.assert_array_equal(np.array([[0]]), iscrowd[2])
  99. np.testing.assert_array_equal(np.array([[50., 52., 53., 54., 55., 56., 57., 58., 59., 60., 61., 62., 63.]]),
  100. segmentation[3])
  101. np.testing.assert_array_equal(np.array([[0]]), iscrowd[3])
  102. np.testing.assert_array_equal(np.array([[60., 62., 63., 64., 65., 66., 67., 68., 69., 70., 71., 72., 73., 74.]]),
  103. segmentation[4])
  104. np.testing.assert_array_equal(np.array([[0]]), iscrowd[4])
  105. np.testing.assert_array_equal(np.array([[60., 62., 63., 64., 65., 66., 67.], [68., 69., 70., 71., 72., 73., 74.]]),
  106. segmentation[5])
  107. np.testing.assert_array_equal(np.array([[0]]), iscrowd[5])
  108. def test_coco_keypoint():
  109. data1 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint",
  110. decode=True, shuffle=False, extra_metadata=True)
  111. data1 = data1.rename("_meta-filename", "filename")
  112. num_iter = 0
  113. file_name = []
  114. image_shape = []
  115. keypoints = []
  116. num_keypoints = []
  117. for data in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  118. file_name.append(text.to_str(data["filename"]))
  119. image_shape.append(data["image"].shape)
  120. keypoints.append(data["keypoints"])
  121. num_keypoints.append(data["num_keypoints"])
  122. num_iter += 1
  123. assert num_iter == 2
  124. assert file_name == ["000000391895", "000000318219"]
  125. assert image_shape[0] == (2268, 4032, 3)
  126. assert image_shape[1] == (561, 595, 3)
  127. np.testing.assert_array_equal(np.array([[368., 61., 1., 369., 52., 2., 0., 0., 0., 382., 48., 2., 0., 0., 0., 368.,
  128. 84., 2., 435., 81., 2., 362., 125., 2., 446., 125., 2., 360., 153., 2., 0.,
  129. 0., 0., 397., 167., 1., 439., 166., 1., 369., 193., 2., 461., 234., 2.,
  130. 361., 246., 2., 474., 287., 2.]]), keypoints[0])
  131. np.testing.assert_array_equal(np.array([[14]]), num_keypoints[0])
  132. np.testing.assert_array_equal(np.array([[244., 139., 2., 0., 0., 0., 226., 118., 2., 0., 0., 0., 154., 159., 2.,
  133. 143., 261., 2., 135., 312., 2., 271., 423., 2., 184., 530., 2., 261., 280.,
  134. 2., 347., 592., 2., 0., 0., 0., 123., 596., 2., 0., 0., 0., 0., 0., 0., 0.,
  135. 0., 0., 0., 0., 0.]]),
  136. keypoints[1])
  137. np.testing.assert_array_equal(np.array([[10]]), num_keypoints[1])
  138. def test_coco_panoptic():
  139. data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic", decode=True, shuffle=False,
  140. extra_metadata=True)
  141. data1 = data1.rename("_meta-filename", "filename")
  142. num_iter = 0
  143. file_name = []
  144. image_shape = []
  145. bbox = []
  146. category_id = []
  147. iscrowd = []
  148. area = []
  149. for data in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  150. file_name.append(text.to_str(data["filename"]))
  151. image_shape.append(data["image"].shape)
  152. bbox.append(data["bbox"])
  153. category_id.append(data["category_id"])
  154. iscrowd.append(data["iscrowd"])
  155. area.append(data["area"])
  156. num_iter += 1
  157. assert num_iter == 2
  158. assert file_name == ["000000391895", "000000574769"]
  159. assert image_shape[0] == (2268, 4032, 3)
  160. np.testing.assert_array_equal(np.array([[472, 173, 36, 48], [340, 22, 154, 301], [486, 183, 30, 35]]), bbox[0])
  161. np.testing.assert_array_equal(np.array([[1], [1], [2]]), category_id[0])
  162. np.testing.assert_array_equal(np.array([[0], [0], [0]]), iscrowd[0])
  163. np.testing.assert_array_equal(np.array([[705], [14062], [626]]), area[0])
  164. assert image_shape[1] == (642, 675, 3)
  165. np.testing.assert_array_equal(np.array([[103, 133, 229, 422], [243, 175, 93, 164]]), bbox[1])
  166. np.testing.assert_array_equal(np.array([[1], [3]]), category_id[1])
  167. np.testing.assert_array_equal(np.array([[0], [0]]), iscrowd[1])
  168. np.testing.assert_array_equal(np.array([[43102], [6079]]), area[1])
  169. def test_coco_captioning():
  170. """
  171. Feature: CocoDataset
  172. Description: test the captioning task of CocoDataset
  173. Expectation: the data is processed successfully
  174. """
  175. data1 = ds.CocoDataset(DATA_DIR, annotation_file=CAPTIONS_FILE, task="Captioning", decode=True, shuffle=False,
  176. extra_metadata=True)
  177. data1 = data1.rename("_meta-filename", "filename")
  178. num_iter = 0
  179. file_name = []
  180. image_shape = []
  181. captions_list = []
  182. for data in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  183. file_name.append(text.to_str(data["filename"]))
  184. image_shape.append(data["image"].shape)
  185. captions_list.append(data["captions"])
  186. num_iter += 1
  187. assert num_iter == 2
  188. assert file_name == ["000000391895", "000000318219"]
  189. assert image_shape[0] == (2268, 4032, 3)
  190. np.testing.assert_array_equal(np.array([[b"This is a banana"], [b"This banana is yellow"],
  191. [b"This banana is on a white table"],
  192. [b"The tail of this banana is facing up"],
  193. [b"This banana has spots"]]), captions_list[0])
  194. assert image_shape[1] == (561, 595, 3)
  195. np.testing.assert_array_equal(np.array([[b"This is an orange"], [b"This orange is orange"],
  196. [b"This orange is on a dark cloth"],
  197. [b"The head of this orange is facing up"],
  198. [b"This orange has spots"]]), captions_list[1])
  199. def test_coco_meta_column():
  200. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection",
  201. decode=True, shuffle=False, extra_metadata=True)
  202. for item in data1.create_tuple_iterator(num_epochs=1):
  203. assert len(item) == 4
  204. data2 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff",
  205. decode=True, shuffle=False, extra_metadata=True)
  206. for item in data2.create_tuple_iterator(num_epochs=1):
  207. assert len(item) == 3
  208. data3 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint",
  209. decode=True, shuffle=False, extra_metadata=True)
  210. for item in data3.create_tuple_iterator(num_epochs=1):
  211. assert len(item) == 3
  212. data4 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic",
  213. decode=True, shuffle=False, extra_metadata=True)
  214. for item in data4.create_tuple_iterator(num_epochs=1):
  215. assert len(item) == 5
  216. def test_coco_detection_classindex():
  217. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
  218. class_index = data1.get_class_indexing()
  219. assert class_index == {'person': [1], 'bicycle': [2], 'car': [3], 'cat': [4], 'dog': [5], 'monkey': [6],
  220. 'bag': [7], 'orange': [8]}
  221. num_iter = 0
  222. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  223. num_iter += 1
  224. assert num_iter == 6
  225. def test_coco_panootic_classindex():
  226. data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic", decode=True)
  227. class_index = data1.get_class_indexing()
  228. assert class_index == {'person': [1, 1], 'bicycle': [2, 1], 'car': [3, 1]}
  229. num_iter = 0
  230. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  231. num_iter += 1
  232. assert num_iter == 2
  233. def test_coco_case_0():
  234. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
  235. data1 = data1.shuffle(10)
  236. data1 = data1.batch(3, pad_info={})
  237. num_iter = 0
  238. for _ in data1.create_dict_iterator(num_epochs=1):
  239. num_iter += 1
  240. assert num_iter == 2
  241. def test_coco_case_1():
  242. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
  243. sizes = [0.5, 0.5]
  244. randomize = False
  245. dataset1, dataset2 = data1.split(sizes=sizes, randomize=randomize)
  246. num_iter = 0
  247. for _ in dataset1.create_dict_iterator(num_epochs=1):
  248. num_iter += 1
  249. assert num_iter == 3
  250. num_iter = 0
  251. for _ in dataset2.create_dict_iterator(num_epochs=1):
  252. num_iter += 1
  253. assert num_iter == 3
  254. def test_coco_case_2():
  255. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
  256. resize_op = vision.Resize((224, 224))
  257. data1 = data1.map(operations=resize_op, input_columns=["image"])
  258. data1 = data1.repeat(4)
  259. num_iter = 0
  260. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  261. num_iter += 1
  262. assert num_iter == 24
  263. def test_coco_case_3():
  264. data1 = ds.CocoDataset(DATA_DIR_2, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
  265. resize_op = vision.Resize((224, 224))
  266. data1 = data1.map(operations=resize_op, input_columns=["image"])
  267. data1 = data1.repeat(4)
  268. num_iter = 0
  269. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  270. num_iter += 1
  271. assert num_iter == 24
  272. def test_coco_case_exception():
  273. try:
  274. data1 = ds.CocoDataset("path_not_exist/", annotation_file=ANNOTATION_FILE, task="Detection")
  275. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  276. pass
  277. assert False
  278. except ValueError as e:
  279. assert "does not exist or is not a directory or permission denied" in str(e)
  280. try:
  281. data1 = ds.CocoDataset(DATA_DIR, annotation_file="./file_not_exist", task="Detection")
  282. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  283. pass
  284. assert False
  285. except ValueError as e:
  286. assert "does not exist or permission denied" in str(e)
  287. try:
  288. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Invalid task")
  289. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  290. pass
  291. assert False
  292. except ValueError as e:
  293. assert "Invalid task type" in str(e)
  294. try:
  295. data1 = ds.CocoDataset(DATA_DIR, annotation_file=LACKOFIMAGE_FILE, task="Detection")
  296. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  297. pass
  298. assert False
  299. except RuntimeError as e:
  300. assert "the attribute of 'images' is missing" in str(e)
  301. try:
  302. data1 = ds.CocoDataset(DATA_DIR, annotation_file=INVALID_CATEGORY_ID_FILE, task="Detection")
  303. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  304. pass
  305. assert False
  306. except RuntimeError as e:
  307. assert "the attribute of 'category_id': 7 is missing" in str(e)
  308. try:
  309. data1 = ds.CocoDataset(DATA_DIR, annotation_file=INVALID_FILE, task="Detection")
  310. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  311. pass
  312. assert False
  313. except RuntimeError as e:
  314. assert "Invalid annotation file, Coco Dataset annotation file:" in str(e)
  315. try:
  316. sampler = ds.PKSampler(3)
  317. data1 = ds.CocoDataset(DATA_DIR, annotation_file=INVALID_FILE, task="Detection", sampler=sampler)
  318. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  319. pass
  320. assert False
  321. except ValueError as e:
  322. assert "CocoDataset doesn't support PKSampler" in str(e)
  323. def exception_func(item):
  324. raise Exception("Error occur!")
  325. try:
  326. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection")
  327. data1 = data1.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)
  328. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  329. pass
  330. assert False
  331. except RuntimeError as e:
  332. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  333. try:
  334. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection")
  335. data1 = data1.map(operations=vision.Decode(), input_columns=["image"], num_parallel_workers=1)
  336. data1 = data1.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)
  337. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  338. pass
  339. assert False
  340. except RuntimeError as e:
  341. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  342. try:
  343. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection")
  344. data1 = data1.map(operations=exception_func, input_columns=["bbox"], num_parallel_workers=1)
  345. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  346. pass
  347. assert False
  348. except RuntimeError as e:
  349. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  350. try:
  351. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection")
  352. data1 = data1.map(operations=exception_func, input_columns=["category_id"], num_parallel_workers=1)
  353. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  354. pass
  355. assert False
  356. except RuntimeError as e:
  357. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  358. try:
  359. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff")
  360. data1 = data1.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)
  361. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  362. pass
  363. assert False
  364. except RuntimeError as e:
  365. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  366. try:
  367. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff")
  368. data1 = data1.map(operations=vision.Decode(), input_columns=["image"], num_parallel_workers=1)
  369. data1 = data1.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)
  370. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  371. pass
  372. assert False
  373. except RuntimeError as e:
  374. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  375. try:
  376. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff")
  377. data1 = data1.map(operations=exception_func, input_columns=["segmentation"], num_parallel_workers=1)
  378. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  379. pass
  380. assert False
  381. except RuntimeError as e:
  382. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  383. try:
  384. data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff")
  385. data1 = data1.map(operations=exception_func, input_columns=["iscrowd"], num_parallel_workers=1)
  386. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  387. pass
  388. assert False
  389. except RuntimeError as e:
  390. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  391. try:
  392. data1 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint")
  393. data1 = data1.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)
  394. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  395. pass
  396. assert False
  397. except RuntimeError as e:
  398. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  399. try:
  400. data1 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint")
  401. data1 = data1.map(operations=vision.Decode(), input_columns=["image"], num_parallel_workers=1)
  402. data1 = data1.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)
  403. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  404. pass
  405. assert False
  406. except RuntimeError as e:
  407. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  408. try:
  409. data1 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint")
  410. data1 = data1.map(operations=exception_func, input_columns=["keypoints"], num_parallel_workers=1)
  411. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  412. pass
  413. assert False
  414. except RuntimeError as e:
  415. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  416. try:
  417. data1 = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint")
  418. data1 = data1.map(operations=exception_func, input_columns=["num_keypoints"], num_parallel_workers=1)
  419. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  420. pass
  421. assert False
  422. except RuntimeError as e:
  423. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  424. try:
  425. data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic")
  426. data1 = data1.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)
  427. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  428. pass
  429. assert False
  430. except RuntimeError as e:
  431. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  432. try:
  433. data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic")
  434. data1 = data1.map(operations=vision.Decode(), input_columns=["image"], num_parallel_workers=1)
  435. data1 = data1.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)
  436. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  437. pass
  438. assert False
  439. except RuntimeError as e:
  440. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  441. try:
  442. data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic")
  443. data1 = data1.map(operations=exception_func, input_columns=["bbox"], num_parallel_workers=1)
  444. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  445. pass
  446. assert False
  447. except RuntimeError as e:
  448. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  449. try:
  450. data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic")
  451. data1 = data1.map(operations=exception_func, input_columns=["category_id"], num_parallel_workers=1)
  452. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  453. pass
  454. assert False
  455. except RuntimeError as e:
  456. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  457. try:
  458. data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic")
  459. data1 = data1.map(operations=exception_func, input_columns=["area"], num_parallel_workers=1)
  460. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  461. pass
  462. assert False
  463. except RuntimeError as e:
  464. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  465. try:
  466. data1 = ds.CocoDataset(DATA_DIR, annotation_file=CAPTIONS_FILE, task="Captioning")
  467. data1 = data1.map(operations=exception_func, input_columns=["image"], num_parallel_workers=1)
  468. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  469. pass
  470. assert False
  471. except RuntimeError as e:
  472. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  473. try:
  474. data1 = ds.CocoDataset(DATA_DIR, annotation_file=CAPTIONS_FILE, task="Captioning")
  475. data1 = data1.map(operations=exception_func, input_columns=["captions"], num_parallel_workers=1)
  476. for _ in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
  477. pass
  478. assert False
  479. except RuntimeError as e:
  480. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  481. if __name__ == '__main__':
  482. test_coco_captioning()
  483. test_coco_detection()
  484. test_coco_stuff()
  485. test_coco_keypoint()
  486. test_coco_panoptic()
  487. test_coco_detection_classindex()
  488. test_coco_panootic_classindex()
  489. test_coco_case_0()
  490. test_coco_case_1()
  491. test_coco_case_2()
  492. test_coco_case_3()
  493. test_coco_case_exception()