/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <map>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>

#include "utils/log_adapter.h"
#include "common/utils.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "securec.h"
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/status.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/include/iterator.h"
#include "minddata/dataset/core/constants.h"
#include "minddata/dataset/core/tensor_shape.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/include/samplers.h"
#include "minddata/dataset/engine/datasetops/source/voc_op.h"

using namespace mindspore::dataset::api;
using mindspore::MsLogLevel::ERROR;
using mindspore::ExceptionType::NoExceptionType;
using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorShape;
using mindspore::dataset::TensorImpl;
using mindspore::dataset::DataType;
using mindspore::dataset::Status;
using mindspore::dataset::BorderType;
using mindspore::dataset::dsize_t;

class MindDataTestPipeline : public UT::DatasetOpTesting {
 protected:
};

TEST_F(MindDataTestPipeline, TestBatchAndRepeat) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestBatchAndRepeat.";

  // Create a Mnist Dataset
  std::string folder_path = datasets_root_path_ + "/testMnistData/";
  std::shared_ptr<Dataset> ds = Mnist(folder_path, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 2;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 10);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestMnistFail1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMnistFail1.";

  // Create a Mnist Dataset
  std::shared_ptr<Dataset> ds = Mnist("", RandomSampler(false, 10));
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTensorOpsAndMap) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTensorOpsAndMap.";

  // Create a Mnist Dataset
  std::string folder_path = datasets_root_path_ + "/testMnistData/";
  std::shared_ptr<Dataset> ds = Mnist(folder_path, RandomSampler(false, 20));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> resize_op = vision::Resize({30, 30});
  EXPECT_NE(resize_op, nullptr);

  std::shared_ptr<TensorOperation> center_crop_op = vision::CenterCrop({16, 16});
  EXPECT_NE(center_crop_op, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({resize_op, center_crop_op});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 40);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestUniformAugWithOps) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestUniformAugWithOps.";

  // Create a Mnist Dataset
  std::string folder_path = datasets_root_path_ + "/testMnistData/";
  std::shared_ptr<Dataset> ds = Mnist(folder_path, RandomSampler(false, 20));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 1;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> resize_op = vision::Resize({30, 30});
  EXPECT_NE(resize_op, nullptr);

  std::shared_ptr<TensorOperation> random_crop_op = vision::RandomCrop({28, 28});
  EXPECT_NE(random_crop_op, nullptr);

  std::shared_ptr<TensorOperation> center_crop_op = vision::CenterCrop({16, 16});
  EXPECT_NE(center_crop_op, nullptr);

  std::shared_ptr<TensorOperation> uniform_aug_op = vision::UniformAugment({random_crop_op, center_crop_op}, 2);
  EXPECT_NE(uniform_aug_op, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({resize_op, uniform_aug_op});
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestRandomFlip) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRandomFlip.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> random_vertical_flip_op = vision::RandomVerticalFlip(0.5);
  EXPECT_NE(random_vertical_flip_op, nullptr);

  std::shared_ptr<TensorOperation> random_horizontal_flip_op = vision::RandomHorizontalFlip(0.5);
  EXPECT_NE(random_horizontal_flip_op, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({random_vertical_flip_op, random_horizontal_flip_op});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestImageFolderBatchAndRepeat) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderBatchAndRepeat.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 2;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 10);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestImageFolderFail1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFail1.";

  // Create an ImageFolder Dataset
  std::shared_ptr<Dataset> ds = ImageFolder("", true, nullptr);
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestImageFolderWithSamplers) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderWithSamplers.";

  std::shared_ptr<SamplerObj> sampl = DistributedSampler(2, 1);
  EXPECT_NE(sampl, nullptr);

  sampl = PKSampler(3);
  EXPECT_NE(sampl, nullptr);

  sampl = RandomSampler(false, 12);
  EXPECT_NE(sampl, nullptr);

  sampl = SequentialSampler(0, 12);
  EXPECT_NE(sampl, nullptr);

  std::vector<double> weights = {0.9, 0.8, 0.68, 0.7, 0.71, 0.6, 0.5, 0.4, 0.3, 0.5, 0.2, 0.1};
  sampl = WeightedRandomSampler(weights, 12);
  EXPECT_NE(sampl, nullptr);

  std::vector<int64_t> indices = {1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23};
  sampl = SubsetRandomSampler(indices);
  EXPECT_NE(sampl, nullptr);

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, false, sampl);
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 2;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 12);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestPad) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestPad.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> pad_op1 = vision::Pad({1, 2, 3, 4}, {0}, BorderType::kSymmetric);
  EXPECT_NE(pad_op1, nullptr);

  std::shared_ptr<TensorOperation> pad_op2 = vision::Pad({1}, {1, 1, 1}, BorderType::kEdge);
  EXPECT_NE(pad_op2, nullptr);

  std::shared_ptr<TensorOperation> pad_op3 = vision::Pad({1, 4});
  EXPECT_NE(pad_op3, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({pad_op1, pad_op2, pad_op3});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCutOut) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCutOut.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> cut_out1 = vision::CutOut(30, 5);
  EXPECT_NE(cut_out1, nullptr);

  std::shared_ptr<TensorOperation> cut_out2 = vision::CutOut(30);
  EXPECT_NE(cut_out2, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({cut_out1, cut_out2});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestNormalize) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestNormalize.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> normalize = vision::Normalize({121.0, 115.0, 100.0}, {70.0, 68.0, 71.0});
  EXPECT_NE(normalize, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({normalize});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestDecode) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestDecode.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, false, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> decode = vision::Decode(true);
  EXPECT_NE(decode, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({decode});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestShuffleDataset) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestShuffleDataset.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Shuffle operation on ds
  int32_t shuffle_size = 10;
  ds = ds->Shuffle(shuffle_size);
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 2;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 10);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestSkipDataset) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestSkipDataset.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Skip operation on ds
  int32_t count = 3;
  ds = ds->Skip(count);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  MS_LOG(INFO) << "Number of rows: " << i;

  // Expect 10 - 3 = 7 rows
  EXPECT_EQ(i, 7);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestSkipDatasetError1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestSkipDatasetError1.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Skip operation on ds with invalid count input
  int32_t count = -1;
  ds = ds->Skip(count);
  // Expect nullptr for invalid input skip_count
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestTakeDatasetDefault) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTakeDatasetDefault.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 7));
  EXPECT_NE(ds, nullptr);

  // Create a Take operation on ds, default count = -1
  ds = ds->Take();
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  MS_LOG(INFO) << "Number of rows: " << i;

  // Expect 7 rows
  EXPECT_EQ(i, 7);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestTakeDatasetNormal) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTakeDatasetNormal.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 8));
  EXPECT_NE(ds, nullptr);

  // Create a Take operation on ds
  ds = ds->Take(5);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  MS_LOG(INFO) << "Number of rows: " << i;

  // Expect 5 rows
  EXPECT_EQ(i, 5);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestTakeDatasetError1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTakeDatasetError1.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Take operation on ds with invalid count input
  int32_t count = -5;
  ds = ds->Take(count);
  // Expect nullptr for invalid input take_count
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestCifar10Dataset) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10Dataset.";

  // Create a Cifar10 Dataset
  std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
  std::shared_ptr<Dataset> ds = Cifar10(folder_path, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("image"), row.end());
  EXPECT_NE(row.find("label"), row.end());

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 10);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCifar10DatasetFail1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetFail1.";

  // Create a Cifar10 Dataset
  std::shared_ptr<Dataset> ds = Cifar10("", RandomSampler(false, 10));
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestCifar100Dataset) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar100Dataset.";

  // Create a Cifar100 Dataset
  std::string folder_path = datasets_root_path_ + "/testCifar100Data/";
  std::shared_ptr<Dataset> ds = Cifar100(folder_path, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  EXPECT_NE(row.find("image"), row.end());
  EXPECT_NE(row.find("coarse_label"), row.end());
  EXPECT_NE(row.find("fine_label"), row.end());

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 10);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCifar100DatasetFail1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar100DatasetFail1.";

  // Create a Cifar100 Dataset
  std::shared_ptr<Dataset> ds = Cifar100("", RandomSampler(false, 10));
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestRandomColorAdjust) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRandomColorAdjust.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> random_color_adjust1 = vision::RandomColorAdjust({1.0}, {0.0}, {0.5}, {0.5});
  EXPECT_NE(random_color_adjust1, nullptr);

  std::shared_ptr<TensorOperation> random_color_adjust2 =
    vision::RandomColorAdjust({1.0, 1.0}, {0.0, 0.0}, {0.5, 0.5}, {0.5, 0.5});
  EXPECT_NE(random_color_adjust2, nullptr);

  std::shared_ptr<TensorOperation> random_color_adjust3 =
    vision::RandomColorAdjust({0.5, 1.0}, {0.0, 0.5}, {0.25, 0.5}, {0.25, 0.5});
  EXPECT_NE(random_color_adjust3, nullptr);

  std::shared_ptr<TensorOperation> random_color_adjust4 = vision::RandomColorAdjust();
  EXPECT_NE(random_color_adjust4, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({random_color_adjust1, random_color_adjust2, random_color_adjust3, random_color_adjust4});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestRandomRotation) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRandomRotation.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> random_rotation_op = vision::RandomRotation({-180, 180});
  EXPECT_NE(random_rotation_op, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({random_rotation_op});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestProjectMap) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestProjectMap.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create objects for the tensor ops
  std::shared_ptr<TensorOperation> random_vertical_flip_op = vision::RandomVerticalFlip(0.5);
  EXPECT_NE(random_vertical_flip_op, nullptr);

  // Create a Map operation on ds
  ds = ds->Map({random_vertical_flip_op}, {}, {}, {"image", "label"});
  EXPECT_NE(ds, nullptr);

  // Create a Project operation on ds
  std::vector<std::string> column_project = {"image"};
  ds = ds->Project(column_project);
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestZipSuccess) {
  // Testing the member zip() function
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestZipSuccess.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Project operation on ds
  std::vector<std::string> column_project = {"image"};
  ds = ds->Project(column_project);
  EXPECT_NE(ds, nullptr);

  // Create an ImageFolder Dataset
  std::shared_ptr<Dataset> ds1 = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds1, nullptr);

  // Create a Rename operation on ds1 (so that the 3 datasets we are going to zip have distinct column names)
  ds1 = ds1->Rename({"image", "label"}, {"col1", "col2"});
  EXPECT_NE(ds1, nullptr);

  folder_path = datasets_root_path_ + "/testCifar10Data/";
  std::shared_ptr<Dataset> ds2 = Cifar10(folder_path, RandomSampler(false, 10));
  EXPECT_NE(ds2, nullptr);

  // Create a Project operation on ds2
  column_project = {"label"};
  ds2 = ds2->Project(column_project);
  EXPECT_NE(ds2, nullptr);

  // Create a Zip operation on the datasets
  ds = ds->Zip({ds1, ds2});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  // Check zipped column names
  EXPECT_EQ(row.size(), 4);
  EXPECT_NE(row.find("image"), row.end());
  EXPECT_NE(row.find("label"), row.end());
  EXPECT_NE(row.find("col1"), row.end());
  EXPECT_NE(row.find("col2"), row.end());

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 10);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestZipSuccess2) {
  // Testing the static zip() function
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestZipSuccess2.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 9));
  EXPECT_NE(ds, nullptr);
  std::shared_ptr<Dataset> ds2 = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds2, nullptr);

  // Create a Rename operation on ds (so that the 2 datasets we are going to zip have distinct column names)
  ds = ds->Rename({"image", "label"}, {"col1", "col2"});
  EXPECT_NE(ds, nullptr);

  // Create a Zip operation on the datasets
  ds = Zip({ds, ds2});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  // Check zipped column names
  EXPECT_EQ(row.size(), 4);
  EXPECT_NE(row.find("image"), row.end());
  EXPECT_NE(row.find("label"), row.end());
  EXPECT_NE(row.find("col1"), row.end());
  EXPECT_NE(row.find("col2"), row.end());

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 9);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestZipFail) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestZipFail.";
  // We expect this test to fail because both datasets we are zipping have "image" and "label" columns,
  // and zip doesn't accept datasets with the same column names

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create an ImageFolder Dataset
  std::shared_ptr<Dataset> ds1 = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds1, nullptr);

  // Create a Zip operation on the datasets
  ds = Zip({ds, ds1});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_EQ(iter, nullptr);
}

TEST_F(MindDataTestPipeline, TestZipFail2) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestZipFail2.";
  // This case is expected to fail because the input dataset is empty.
  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Zip operation on the datasets
  // Input dataset to zip is empty
  ds = Zip({});
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestRenameSuccess) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRenameSuccess.";

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create a Rename operation on ds
  ds = ds->Rename({"image", "label"}, {"col1", "col2"});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  EXPECT_NE(row.find("col1"), row.end());
  EXPECT_NE(row.find("col2"), row.end());
  EXPECT_EQ(row.find("image"), row.end());
  EXPECT_EQ(row.find("label"), row.end());

  while (row.size() != 0) {
    i++;
    auto image = row["col1"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 20);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestRenameFail) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestRenameFail.";
  // We expect this test to fail because the input and output column lists in Rename are not the same size

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create a Rename operation on ds
  ds = ds->Rename({"image", "label"}, {"col2"});
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestVOCSegmentation) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCSegmentation.";

  // Create a VOC Dataset
  std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
  std::shared_ptr<Dataset> ds = VOC(folder_path, "Segmentation", "train", {}, false, SequentialSampler(0, 3));
  EXPECT_NE(ds, nullptr);

  // Create a Repeat operation on ds
  int32_t repeat_num = 2;
  ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  // Check if VOCOp read correct images/targets
  using Tensor = mindspore::dataset::Tensor;
  std::string expect_file[] = {"32", "33", "39", "32", "33", "39"};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto target = row["target"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    MS_LOG(INFO) << "Tensor target shape: " << target->shape();

    std::shared_ptr<Tensor> expect_image;
    Tensor::CreateFromFile(folder_path + "/JPEGImages/" + expect_file[i] + ".jpg", &expect_image);
    EXPECT_EQ(*image, *expect_image);

    std::shared_ptr<Tensor> expect_target;
    Tensor::CreateFromFile(folder_path + "/SegmentationClass/" + expect_file[i] + ".png", &expect_target);
    EXPECT_EQ(*target, *expect_target);

    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 6);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestVOCSegmentationError1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCSegmentationError1.";

  // Create a VOC Dataset
  std::map<std::string, int32_t> class_index;
  class_index["car"] = 0;
  std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
  std::shared_ptr<Dataset> ds = VOC(folder_path, "Segmentation", "train", class_index, false, RandomSampler(false, 6));

  // Expect nullptr for segmentation task with class_index
  EXPECT_EQ(ds, nullptr);
}

TEST_F(MindDataTestPipeline, TestVOCInvalidTaskOrMode) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCInvalidTaskOrMode.";

  // Create a VOC Dataset
  std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
  std::shared_ptr<Dataset> ds_1 = VOC(folder_path, "Classification", "train", {}, false, SequentialSampler(0, 3));
  // Expect nullptr for invalid task
  EXPECT_EQ(ds_1, nullptr);

  std::shared_ptr<Dataset> ds_2 = VOC(folder_path, "Segmentation", "validation", {}, false, RandomSampler(false, 4));
  // Expect nullptr for invalid mode
  EXPECT_EQ(ds_2, nullptr);
}

TEST_F(MindDataTestPipeline, TestVOCDetection) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCDetection.";

  // Create a VOC Dataset
  std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
  std::shared_ptr<Dataset> ds = VOC(folder_path, "Detection", "train", {}, false, SequentialSampler(0, 4));
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  // Check if VOCOp read correct images/labels
  std::string expect_file[] = {"15", "32", "33", "39"};
  uint32_t expect_num[] = {5, 5, 4, 3};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto label = row["label"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    MS_LOG(INFO) << "Tensor label shape: " << label->shape();

    std::shared_ptr<Tensor> expect_image;
    Tensor::CreateFromFile(folder_path + "/JPEGImages/" + expect_file[i] + ".jpg", &expect_image);
    EXPECT_EQ(*image, *expect_image);

    std::shared_ptr<Tensor> expect_label;
    Tensor::CreateFromMemory(TensorShape({1, 1}), DataType(DataType::DE_UINT32), nullptr, &expect_label);
    expect_label->SetItemAt({0, 0}, expect_num[i]);
    EXPECT_EQ(*label, *expect_label);

    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 4);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestVOCClassIndex) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCClassIndex.";

  // Create a VOC Dataset
  std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
  std::map<std::string, int32_t> class_index;
  class_index["car"] = 0;
  class_index["cat"] = 1;
  class_index["train"] = 9;

  std::shared_ptr<Dataset> ds = VOC(folder_path, "Detection", "train", class_index, false, SequentialSampler(0, 6));
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  // Check if VOCOp read correct labels
  // When we provide class_index, labels of ["car","cat","train"] become [0,1,9]
  std::shared_ptr<Tensor> expect_label;
  Tensor::CreateFromMemory(TensorShape({1, 1}), DataType(DataType::DE_UINT32), nullptr, &expect_label);

  uint32_t expect[] = {9, 9, 9, 1, 1, 0};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto label = row["label"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    MS_LOG(INFO) << "Tensor label shape: " << label->shape();

    expect_label->SetItemAt({0, 0}, expect[i]);
    EXPECT_EQ(*label, *expect_label);

    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 6);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCocoDetection) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDetection.";

  // Create a Coco Dataset
  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";

  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Detection", false, SequentialSampler(0, 6));
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  std::string expect_file[] = {"000000391895", "000000318219", "000000554625",
                               "000000574769", "000000060623", "000000309022"};
  std::vector<std::vector<float>> expect_bbox_vector = {{10.0, 10.0, 10.0, 10.0, 70.0, 70.0, 70.0, 70.0},
                                                        {20.0, 20.0, 20.0, 20.0, 80.0, 80.0, 80.0, 80.0},
                                                        {30.0, 30.0, 30.0, 30.0},
                                                        {40.0, 40.0, 40.0, 40.0},
                                                        {50.0, 50.0, 50.0, 50.0},
                                                        {60.0, 60.0, 60.0, 60.0}};
  std::vector<std::vector<uint32_t>> expect_catagoryid_list = {{1, 7}, {2, 8}, {3}, {4}, {5}, {6}};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto bbox = row["bbox"];
    auto category_id = row["category_id"];

    std::shared_ptr<Tensor> expect_image;
    Tensor::CreateFromFile(folder_path + "/" + expect_file[i] + ".jpg", &expect_image);
    EXPECT_EQ(*image, *expect_image);

    std::shared_ptr<Tensor> expect_bbox;
    dsize_t bbox_num = static_cast<dsize_t>(expect_bbox_vector[i].size() / 4);
    Tensor::CreateFromVector(expect_bbox_vector[i], TensorShape({bbox_num, 4}), &expect_bbox);
    EXPECT_EQ(*bbox, *expect_bbox);

    std::shared_ptr<Tensor> expect_categoryid;
    Tensor::CreateFromVector(expect_catagoryid_list[i], TensorShape({bbox_num, 1}), &expect_categoryid);
    EXPECT_EQ(*category_id, *expect_categoryid);

    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 6);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCocoStuff) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoStuff.";

  // Create a Coco Dataset
  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";

  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Stuff", false, SequentialSampler(0, 6));
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  std::string expect_file[] = {"000000391895", "000000318219", "000000554625",
                               "000000574769", "000000060623", "000000309022"};
  std::vector<std::vector<float>> expect_segmentation_vector =
    {{10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0,
      70.0, 72.0, 73.0, 74.0, 75.0, -1.0, -1.0, -1.0, -1.0, -1.0},
     {20.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0,
      10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, -1.0},
     {40.0, 42.0, 43.0, 44.0, 45.0, 46.0, 47.0, 48.0, 49.0, 40.0, 41.0, 42.0},
     {50.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, 60.0, 61.0, 62.0, 63.0},
     {60.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0},
     {60.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0}};
  std::vector<std::vector<dsize_t>> expect_size = {{2, 10}, {2, 11}, {1, 12}, {1, 13}, {1, 14}, {2, 7}};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto segmentation = row["segmentation"];
    auto iscrowd = row["iscrowd"];

    std::shared_ptr<Tensor> expect_image;
    Tensor::CreateFromFile(folder_path + "/" + expect_file[i] + ".jpg", &expect_image);
    EXPECT_EQ(*image, *expect_image);

    std::shared_ptr<Tensor> expect_segmentation;
    Tensor::CreateFromVector(expect_segmentation_vector[i], TensorShape(expect_size[i]), &expect_segmentation);
    EXPECT_EQ(*segmentation, *expect_segmentation);

    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 6);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCocoKeypoint) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoKeypoint.";

  // Create a Coco Dataset
  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/key_point.json";

  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Keypoint", false, SequentialSampler(0, 2));
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  std::string expect_file[] = {"000000391895", "000000318219"};
  std::vector<std::vector<float>> expect_keypoint_vector =
    {{368.0, 61.0, 1.0, 369.0, 52.0, 2.0, 0.0, 0.0, 0.0, 382.0, 48.0, 2.0, 0.0, 0.0, 0.0, 368.0, 84.0, 2.0,
      435.0, 81.0, 2.0, 362.0, 125.0, 2.0, 446.0, 125.0, 2.0, 360.0, 153.0, 2.0, 0.0, 0.0, 0.0, 397.0, 167.0, 1.0,
      439.0, 166.0, 1.0, 369.0, 193.0, 2.0, 461.0, 234.0, 2.0, 361.0, 246.0, 2.0, 474.0, 287.0, 2.0},
     {244.0, 139.0, 2.0, 0.0, 0.0, 0.0, 226.0, 118.0, 2.0, 0.0, 0.0, 0.0, 154.0, 159.0, 2.0, 143.0, 261.0, 2.0,
      135.0, 312.0, 2.0, 271.0, 423.0, 2.0, 184.0, 530.0, 2.0, 261.0, 280.0, 2.0, 347.0, 592.0, 2.0, 0.0, 0.0, 0.0,
      123.0, 596.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0}};
  std::vector<std::vector<dsize_t>> expect_size = {{1, 51}, {1, 51}};
  std::vector<std::vector<uint32_t>> expect_num_keypoints_list = {{14}, {10}};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto keypoints = row["keypoints"];
    auto num_keypoints = row["num_keypoints"];

    std::shared_ptr<Tensor> expect_image;
    Tensor::CreateFromFile(folder_path + "/" + expect_file[i] + ".jpg", &expect_image);
    EXPECT_EQ(*image, *expect_image);

    std::shared_ptr<Tensor> expect_keypoints;
    dsize_t keypoints_size = expect_size[i][0];
    Tensor::CreateFromVector(expect_keypoint_vector[i], TensorShape(expect_size[i]), &expect_keypoints);
    EXPECT_EQ(*keypoints, *expect_keypoints);

    std::shared_ptr<Tensor> expect_num_keypoints;
    Tensor::CreateFromVector(expect_num_keypoints_list[i], TensorShape({keypoints_size, 1}), &expect_num_keypoints);
    EXPECT_EQ(*num_keypoints, *expect_num_keypoints);

    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 2);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCocoPanoptic) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoPanoptic.";

  // Create a Coco Dataset
  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/panoptic.json";

  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Panoptic", false, SequentialSampler(0, 2));
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  std::string expect_file[] = {"000000391895", "000000574769"};
  std::vector<std::vector<float>> expect_bbox_vector = {{472, 173, 36, 48, 340, 22, 154, 301, 486, 183, 30, 35},
                                                        {103, 133, 229, 422, 243, 175, 93, 164}};
  std::vector<std::vector<uint32_t>> expect_categoryid_vector = {{1, 1, 2}, {1, 3}};
  std::vector<std::vector<uint32_t>> expect_iscrowd_vector = {{0, 0, 0}, {0, 0}};
  std::vector<std::vector<uint32_t>> expect_area_vector = {{705, 14062, 626}, {43102, 6079}};
  std::vector<std::vector<dsize_t>> expect_size = {{3, 4}, {2, 4}};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto bbox = row["bbox"];
    auto category_id = row["category_id"];
    auto iscrowd = row["iscrowd"];
    auto area = row["area"];

    std::shared_ptr<Tensor> expect_image;
    Tensor::CreateFromFile(folder_path + "/" + expect_file[i] + ".jpg", &expect_image);
    EXPECT_EQ(*image, *expect_image);

    std::shared_ptr<Tensor> expect_bbox;
    dsize_t bbox_size = expect_size[i][0];
    Tensor::CreateFromVector(expect_bbox_vector[i], TensorShape(expect_size[i]), &expect_bbox);
    EXPECT_EQ(*bbox, *expect_bbox);

    std::shared_ptr<Tensor> expect_categoryid;
    Tensor::CreateFromVector(expect_categoryid_vector[i], TensorShape({bbox_size, 1}), &expect_categoryid);
    EXPECT_EQ(*category_id, *expect_categoryid);

    std::shared_ptr<Tensor> expect_iscrowd;
    Tensor::CreateFromVector(expect_iscrowd_vector[i], TensorShape({bbox_size, 1}), &expect_iscrowd);
    EXPECT_EQ(*iscrowd, *expect_iscrowd);

    std::shared_ptr<Tensor> expect_area;
    Tensor::CreateFromVector(expect_area_vector[i], TensorShape({bbox_size, 1}), &expect_area);
    EXPECT_EQ(*area, *expect_area);

    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 2);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCocoDefault) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDefault.";

  // Create a Coco Dataset
  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";

  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
    auto bbox = row["bbox"];
    auto category_id = row["category_id"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    MS_LOG(INFO) << "Tensor bbox shape: " << bbox->shape();
    MS_LOG(INFO) << "Tensor category_id shape: " << category_id->shape();
    iter->GetNextRow(&row);
    i++;
  }

  EXPECT_EQ(i, 6);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestCocoException) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoException.";

  // Create a Coco Dataset
  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";
  std::string invalid_folder_path = "./NotExist";
  std::string invalid_annotation_file = "./NotExistFile";

  std::shared_ptr<Dataset> ds = Coco(invalid_folder_path, annotation_file);
  EXPECT_EQ(ds, nullptr);

  std::shared_ptr<Dataset> ds1 = Coco(folder_path, invalid_annotation_file);
  EXPECT_EQ(ds1, nullptr);

  std::shared_ptr<Dataset> ds2 = Coco(folder_path, annotation_file, "valid_mode");
  EXPECT_EQ(ds2, nullptr);
}

TEST_F(MindDataTestPipeline, TestConcatSuccess) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatSuccess.";

  // Create an ImageFolder Dataset
  // Column names: {"image", "label"}
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Cifar10 Dataset
  // Column names: {"image", "label"}
  folder_path = datasets_root_path_ + "/testCifar10Data/";
  std::shared_ptr<Dataset> ds2 = Cifar10(folder_path, RandomSampler(false, 9));
  EXPECT_NE(ds2, nullptr);

  // Create a Project operation on ds and ds2
  ds = ds->Project({"image"});
  EXPECT_NE(ds, nullptr);
  ds2 = ds2->Project({"image"});
  EXPECT_NE(ds2, nullptr);

  // Create a Concat operation on the ds
  ds = ds->Concat({ds2});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 19);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestConcatSuccess2) {
  // Test "+" operator to concat two datasets
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatSuccess2.";

  // Create an ImageFolder Dataset
  // Column names: {"image", "label"}
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Cifar10 Dataset
  // Column names: {"image", "label"}
  folder_path = datasets_root_path_ + "/testCifar10Data/";
  std::shared_ptr<Dataset> ds2 = Cifar10(folder_path, RandomSampler(false, 9));
  EXPECT_NE(ds2, nullptr);

  // Create a Project operation on ds and ds2
  ds = ds->Project({"image"});
  EXPECT_NE(ds, nullptr);
  ds2 = ds2->Project({"image"});
  EXPECT_NE(ds2, nullptr);

  // Create a Concat operation on the ds
  ds = ds + ds2;
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);

  // Iterate the dataset and get each row
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);

  uint64_t i = 0;
  while (row.size() != 0) {
    i++;
    auto image = row["image"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
    iter->GetNextRow(&row);
  }

  EXPECT_EQ(i, 19);

  // Manually terminate the pipeline
  iter->Stop();
}

TEST_F(MindDataTestPipeline, TestConcatFail1) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatFail1.";
  // This case is expected to fail because the column names of the concatenated datasets are not the same

  // Create an ImageFolder Dataset
  // Column names: {"image", "label"}
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);
  std::shared_ptr<Dataset> ds2 = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds2, nullptr);

  // Create a Rename operation on ds2
  ds2 = ds2->Rename({"image", "label"}, {"col1", "col2"});
  EXPECT_NE(ds2, nullptr);

  // Create a Concat operation on the ds
  // Column names of the datasets to concat do not match
  ds = ds->Concat({ds2});
  EXPECT_NE(ds, nullptr);

  // Create a Batch operation on ds
  int32_t batch_size = 1;
  ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);

  // Create an iterator over the result of the above dataset
  // This will trigger the creation of the Execution Tree and launch it.
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_EQ(iter, nullptr);
}

TEST_F(MindDataTestPipeline, TestConcatFail2) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestConcatFail2.";
  // This case is expected to fail because the input dataset is empty.

  // Create an ImageFolder Dataset
  std::string folder_path = datasets_root_path_ + "/testPK/data/";
  std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, RandomSampler(false, 10));
  EXPECT_NE(ds, nullptr);

  // Create a Concat operation on the ds
  // Input dataset to concat is empty
  ds = ds->Concat({});
  EXPECT_EQ(ds, nullptr);
}