You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

test_onehot_op.py 7.2 kB

5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217
  1. # Copyright 2019 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ==============================================================================
  15. """
  16. Testing the OneHot Op
  17. """
  18. import numpy as np
  19. import mindspore.dataset as ds
  20. import mindspore.dataset.transforms.c_transforms as data_trans
  21. import mindspore.dataset.transforms.py_transforms as py_trans
  22. import mindspore.dataset.vision.c_transforms as c_vision
  23. from mindspore import log as logger
  24. from util import dataset_equal_with_function
# TFRecord fixture containing 3 images plus labels; paths are relative to the
# test working directory (standard layout for these dataset tests).
DATA_DIR = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"]
# JSON schema describing the columns of the TFRecord file above.
SCHEMA_DIR = "../data/dataset/test_tf_file_3_images/datasetSchema.json"
  27. def one_hot(index, depth):
  28. """
  29. Apply the one_hot
  30. """
  31. arr = np.zeros([1, depth], dtype=np.int32)
  32. arr[0, index] = 1
  33. return arr
  34. def test_one_hot():
  35. """
  36. Test OneHot Tensor Operator
  37. """
  38. logger.info("test_one_hot")
  39. depth = 10
  40. # First dataset
  41. data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, shuffle=False)
  42. one_hot_op = data_trans.OneHot(num_classes=depth)
  43. data1 = data1.map(operations=one_hot_op, input_columns=["label"], column_order=["label"])
  44. # Second dataset
  45. data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["label"], shuffle=False)
  46. assert dataset_equal_with_function(data1, data2, 0, one_hot, depth)
  47. def test_one_hot_post_aug():
  48. """
  49. Test One Hot Encoding after Multiple Data Augmentation Operators
  50. """
  51. logger.info("test_one_hot_post_aug")
  52. data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, shuffle=False)
  53. # Define data augmentation parameters
  54. rescale = 1.0 / 255.0
  55. shift = 0.0
  56. resize_height, resize_width = 224, 224
  57. # Define map operations
  58. decode_op = c_vision.Decode()
  59. rescale_op = c_vision.Rescale(rescale, shift)
  60. resize_op = c_vision.Resize((resize_height, resize_width))
  61. # Apply map operations on images
  62. data1 = data1.map(operations=decode_op, input_columns=["image"])
  63. data1 = data1.map(operations=rescale_op, input_columns=["image"])
  64. data1 = data1.map(operations=resize_op, input_columns=["image"])
  65. # Apply one-hot encoding on labels
  66. depth = 4
  67. one_hot_encode = data_trans.OneHot(depth)
  68. data1 = data1.map(operations=one_hot_encode, input_columns=["label"])
  69. # Apply datasets ops
  70. buffer_size = 100
  71. seed = 10
  72. batch_size = 2
  73. ds.config.set_seed(seed)
  74. data1 = data1.shuffle(buffer_size=buffer_size)
  75. data1 = data1.batch(batch_size, drop_remainder=True)
  76. num_iter = 0
  77. for item in data1.create_dict_iterator(num_epochs=1):
  78. logger.info("image is: {}".format(item["image"]))
  79. logger.info("label is: {}".format(item["label"]))
  80. num_iter += 1
  81. assert num_iter == 1
  82. def test_one_hot_success():
  83. # success
  84. class GetDatasetGenerator:
  85. def __init__(self):
  86. np.random.seed(58)
  87. self.__data = np.random.sample((5, 2))
  88. self.__label = []
  89. for index in range(5):
  90. self.__label.append(np.array(index))
  91. def __getitem__(self, index):
  92. return (self.__data[index], self.__label[index])
  93. def __len__(self):
  94. return len(self.__data)
  95. dataset = ds.GeneratorDataset(GetDatasetGenerator(), ["data", "label"], shuffle=False)
  96. one_hot_encode = py_trans.OneHotOp(10)
  97. trans = py_trans.Compose([one_hot_encode])
  98. dataset = dataset.map(operations=trans, input_columns=["label"])
  99. for index, item in enumerate(dataset.create_dict_iterator(num_epochs=1, output_numpy=True)):
  100. assert item["label"][index] == 1.0
  101. def test_one_hot_success2():
  102. # success
  103. class GetDatasetGenerator:
  104. def __init__(self):
  105. np.random.seed(58)
  106. self.__data = np.random.sample((5, 2))
  107. self.__label = []
  108. for index in range(5):
  109. self.__label.append(np.array([index]))
  110. def __getitem__(self, index):
  111. return (self.__data[index], self.__label[index])
  112. def __len__(self):
  113. return len(self.__data)
  114. dataset = ds.GeneratorDataset(GetDatasetGenerator(), ["data", "label"], shuffle=False)
  115. one_hot_encode = py_trans.OneHotOp(10)
  116. trans = py_trans.Compose([one_hot_encode])
  117. dataset = dataset.map(operations=trans, input_columns=["label"])
  118. for index, item in enumerate(dataset.create_dict_iterator(num_epochs=1, output_numpy=True)):
  119. logger.info(item)
  120. assert item["label"][0][index] == 1.0
  121. def test_one_hot_success3():
  122. # success
  123. class GetDatasetGenerator:
  124. def __init__(self):
  125. np.random.seed(58)
  126. self.__data = np.random.sample((5, 2))
  127. self.__label = []
  128. for _ in range(5):
  129. value = np.ones([10, 1], dtype=np.int32)
  130. for i in range(10):
  131. value[i][0] = i
  132. self.__label.append(value)
  133. def __getitem__(self, index):
  134. return (self.__data[index], self.__label[index])
  135. def __len__(self):
  136. return len(self.__data)
  137. dataset = ds.GeneratorDataset(GetDatasetGenerator(), ["data", "label"], shuffle=False)
  138. one_hot_encode = py_trans.OneHotOp(10)
  139. trans = py_trans.Compose([one_hot_encode])
  140. dataset = dataset.map(operations=trans, input_columns=["label"])
  141. for item in dataset.create_dict_iterator(num_epochs=1, output_numpy=True):
  142. logger.info(item)
  143. for i in range(10):
  144. assert item["label"][i][0][i] == 1.0
  145. def test_one_hot_type_error():
  146. # type error
  147. class GetDatasetGenerator:
  148. def __init__(self):
  149. np.random.seed(58)
  150. self.__data = np.random.sample((5, 2))
  151. self.__label = []
  152. for index in range(5):
  153. self.__label.append(np.array(float(index)))
  154. def __getitem__(self, index):
  155. return (self.__data[index], self.__label[index])
  156. def __len__(self):
  157. return len(self.__data)
  158. dataset = ds.GeneratorDataset(GetDatasetGenerator(), ["data", "label"], shuffle=False)
  159. one_hot_encode = py_trans.OneHotOp(10)
  160. trans = py_trans.Compose([one_hot_encode])
  161. dataset = dataset.map(operations=trans, input_columns=["label"])
  162. try:
  163. for index, item in enumerate(dataset.create_dict_iterator(num_epochs=1, output_numpy=True)):
  164. assert item["label"][index] == 1.0
  165. except RuntimeError as e:
  166. assert "the input numpy type should be int" in str(e)
  167. if __name__ == "__main__":
  168. test_one_hot()
  169. test_one_hot_post_aug()
  170. test_one_hot_success()
  171. test_one_hot_success2()
  172. test_one_hot_success3()
  173. test_one_hot_type_error()