@@ -352,7 +352,7 @@ std::shared_ptr<AlbumNode> Album(const std::string &dataset_dir, const std::stri
 const std::shared_ptr<SamplerObj> &sampler) {
 auto ds = std::make_shared<AlbumNode>(dataset_dir, data_schema, column_names, decode, sampler);
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a CelebANode.
@@ -362,8 +362,7 @@ std::shared_ptr<CelebANode> CelebA(const std::string &dataset_dir, const std::st
 const std::shared_ptr<DatasetCache> &cache) {
 auto ds = std::make_shared<CelebANode>(dataset_dir, usage, sampler, decode, extensions, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a Cifar10Node.
@@ -372,8 +371,7 @@ std::shared_ptr<Cifar10Node> Cifar10(const std::string &dataset_dir, const std::
 const std::shared_ptr<DatasetCache> &cache) {
 auto ds = std::make_shared<Cifar10Node>(dataset_dir, usage, sampler, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a Cifar100Node.
@@ -382,8 +380,7 @@ std::shared_ptr<Cifar100Node> Cifar100(const std::string &dataset_dir, const std
 const std::shared_ptr<DatasetCache> &cache) {
 auto ds = std::make_shared<Cifar100Node>(dataset_dir, usage, sampler, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a CLUENode.
@@ -392,8 +389,7 @@ std::shared_ptr<CLUENode> CLUE(const std::vector<std::string> &clue_files, const
 int32_t shard_id, const std::shared_ptr<DatasetCache> &cache) {
 auto ds = std::make_shared<CLUENode>(clue_files, task, usage, num_samples, shuffle, num_shards, shard_id, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a CocoNode.
@@ -402,8 +398,7 @@ std::shared_ptr<CocoNode> Coco(const std::string &dataset_dir, const std::string
 const std::shared_ptr<DatasetCache> &cache) {
 auto ds = std::make_shared<CocoNode>(dataset_dir, annotation_file, task, decode, sampler, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a CSVNode.
@@ -414,8 +409,7 @@ std::shared_ptr<CSVNode> CSV(const std::vector<std::string> &dataset_files, char
 auto ds = std::make_shared<CSVNode>(dataset_files, field_delim, column_defaults, column_names, num_samples, shuffle,
 num_shards, shard_id, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a ImageFolderNode.
@@ -431,8 +425,7 @@ std::shared_ptr<ImageFolderNode> ImageFolder(const std::string &dataset_dir, boo
 auto ds =
 std::make_shared<ImageFolderNode>(dataset_dir, decode, sampler, recursive, extensions, class_indexing, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 #ifndef ENABLE_ANDROID
@@ -443,8 +436,7 @@ std::shared_ptr<ManifestNode> Manifest(const std::string &dataset_file, const st
 const std::shared_ptr<DatasetCache> &cache) {
 auto ds = std::make_shared<ManifestNode>(dataset_file, usage, sampler, class_indexing, decode, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a MindDataNode.
@@ -453,8 +445,7 @@ std::shared_ptr<MindDataNode> MindData(const std::string &dataset_file, const st
 int64_t num_padded) {
 auto ds = std::make_shared<MindDataNode>(dataset_file, columns_list, sampler, padded_sample, num_padded);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a MindDataNode.
@@ -464,8 +455,7 @@ std::shared_ptr<MindDataNode> MindData(const std::vector<std::string> &dataset_f
 int64_t num_padded) {
 auto ds = std::make_shared<MindDataNode>(dataset_files, columns_list, sampler, padded_sample, num_padded);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 #endif
@@ -475,8 +465,7 @@ std::shared_ptr<MnistNode> Mnist(const std::string &dataset_dir, const std::stri
 const std::shared_ptr<DatasetCache> &cache) {
 auto ds = std::make_shared<MnistNode>(dataset_dir, usage, sampler, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to overload "+" operator to concat two datasets
@@ -484,8 +473,7 @@ std::shared_ptr<ConcatNode> operator+(const std::shared_ptr<Dataset> &datasets1,
 const std::shared_ptr<Dataset> &datasets2) {
 std::shared_ptr<ConcatNode> ds = std::make_shared<ConcatNode>(std::vector({datasets2, datasets1}));
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a TextFileNode.
@@ -494,8 +482,7 @@ std::shared_ptr<TextFileNode> TextFile(const std::vector<std::string> &dataset_f
 const std::shared_ptr<DatasetCache> &cache) {
 auto ds = std::make_shared<TextFileNode>(dataset_files, num_samples, shuffle, num_shards, shard_id, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 #ifndef ENABLE_ANDROID
@@ -505,8 +492,7 @@ std::shared_ptr<VOCNode> VOC(const std::string &dataset_dir, const std::string &
 const std::shared_ptr<SamplerObj> &sampler, const std::shared_ptr<DatasetCache> &cache) {
 auto ds = std::make_shared<VOCNode>(dataset_dir, task, usage, class_indexing, decode, sampler, cache);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 #endif
@@ -514,8 +500,7 @@ std::shared_ptr<VOCNode> VOC(const std::string &dataset_dir, const std::string &
 std::shared_ptr<ZipNode> Zip(const std::vector<std::shared_ptr<Dataset>> &datasets) {
 auto ds = std::make_shared<ZipNode>(datasets);
-// Call derived class validation method.
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // FUNCTIONS TO CREATE DATASETS FOR DATASET OPS
@@ -529,10 +514,6 @@ std::shared_ptr<BatchNode> Dataset::Batch(int32_t batch_size, bool drop_remainde
 bool pad = false;
 auto ds = std::make_shared<BatchNode>(shared_from_this(), batch_size, drop_remainder, pad, cols_to_map, pad_map);
-if (!ds->ValidateParams()) {
-return nullptr;
-}
 return ds;
 }
@@ -547,10 +528,6 @@ std::shared_ptr<BucketBatchByLengthNode> Dataset::BucketBatchByLength(
 bucket_batch_sizes, element_length_function, pad_info,
 pad_to_bucket_boundary, drop_remainder);
-if (!ds->ValidateParams()) {
-return nullptr;
-}
 return ds;
 }
@@ -617,7 +594,7 @@ std::shared_ptr<ConcatNode> Dataset::Concat(const std::vector<std::shared_ptr<Da
 auto ds = std::make_shared<ConcatNode>(datasets);
 ds->children.push_back(shared_from_this());
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 // Function to create a Map dataset.
@@ -628,20 +605,12 @@ std::shared_ptr<MapNode> Dataset::Map(std::vector<std::shared_ptr<TensorOperatio
 auto ds =
 std::make_shared<MapNode>(shared_from_this(), operations, input_columns, output_columns, project_columns, cache);
-if (!ds->ValidateParams()) {
-return nullptr;
-}
 return ds;
 }
 // Function to create a ProjectNode.
 std::shared_ptr<ProjectNode> Dataset::Project(const std::vector<std::string> &columns) {
 auto ds = std::make_shared<ProjectNode>(shared_from_this(), columns);
-// Call derived class validation method.
-if (!ds->ValidateParams()) {
-return nullptr;
-}
 return ds;
 }
@@ -650,10 +619,6 @@ std::shared_ptr<ProjectNode> Dataset::Project(const std::vector<std::string> &co
 std::shared_ptr<RenameNode> Dataset::Rename(const std::vector<std::string> &input_columns,
 const std::vector<std::string> &output_columns) {
 auto ds = std::make_shared<RenameNode>(shared_from_this(), input_columns, output_columns);
-// Call derived class validation method.
-if (!ds->ValidateParams()) {
-return nullptr;
-}
 return ds;
 }
@@ -667,10 +632,6 @@ std::shared_ptr<Dataset> Dataset::Repeat(int32_t count) {
 auto ds = std::make_shared<RepeatNode>(shared_from_this(), count);
-if (!ds->ValidateParams()) {
-return nullptr;
-}
 return ds;
 }
@@ -679,10 +640,6 @@ std::shared_ptr<ShuffleNode> Dataset::Shuffle(int32_t buffer_size) {
 // Pass in reshuffle_each_epoch with true
 auto ds = std::make_shared<ShuffleNode>(shared_from_this(), buffer_size, true);
-if (!ds->ValidateParams()) {
-return nullptr;
-}
 return ds;
 }
@@ -690,11 +647,6 @@ std::shared_ptr<ShuffleNode> Dataset::Shuffle(int32_t buffer_size) {
 std::shared_ptr<SkipNode> Dataset::Skip(int32_t count) {
 auto ds = std::make_shared<SkipNode>(shared_from_this(), count);
-// Call derived class validation method.
-if (!ds->ValidateParams()) {
-return nullptr;
-}
 return ds;
 }
@@ -708,11 +660,6 @@ std::shared_ptr<Dataset> Dataset::Take(int32_t count) {
 auto ds = std::make_shared<TakeNode>(shared_from_this(), count);
-// Call derived class validation method.
-if (!ds->ValidateParams()) {
-return nullptr;
-}
 return ds;
 }
@@ -722,8 +669,9 @@ std::shared_ptr<ZipNode> Dataset::Zip(const std::vector<std::shared_ptr<Dataset>
 auto ds = std::make_shared<ZipNode>(datasets);
 ds->children.push_back(shared_from_this());
-return ds->ValidateParams() ? ds : nullptr;
+return ds;
 }
 Status Dataset::AddCacheOp(std::vector<std::shared_ptr<DatasetOp>> *node_ops) {
 if (cache_ != nullptr) {
 RETURN_IF_NOT_OK(cache_->Build());
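The change above moves parameter validation out of the dataset factory functions and dataset ops: they now always return a node, and invalid parameters surface when the pipeline is built. A minimal sketch of the resulting usage pattern, based on the Cifar10 test updated below (the error-handling branch is illustrative and not part of this patch):

    // An invalid argument (empty dataset_dir) no longer makes the factory return nullptr.
    std::shared_ptr<Dataset> ds = Cifar10("", "all", RandomSampler(false, 10));
    // ds is a non-null IR node; validation now runs when the iterator is created.
    std::shared_ptr<Iterator> iter = ds->CreateIterator();
    if (iter == nullptr) {
      // Validation failed, so no execution tree was built; handle the bad configuration here.
    }

The test updates that follow apply this pattern throughout: EXPECT_NE on the dataset object, then EXPECT_EQ(iter, nullptr) after CreateIterator().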
@@ -162,30 +162,42 @@ TEST_F(MindDataTestPipeline, TestAlbumError) {
 std::string folder_path = datasets_root_path_ + "/testAlbum/ima";
 std::string schema_file = datasets_root_path_ + "/testAlbum/datasetSchema.json";
 std::vector<std::string> column_names = {"image", "label", "id"};
-// Create a Album Dataset
+// Create an Album Dataset
 std::shared_ptr<Dataset> ds = Album(folder_path, schema_file, column_names, true, SequentialSampler(0, 1));
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid Album input
+EXPECT_EQ(iter, nullptr);
 }
-TEST_F(MindDataTestPipeline, TestAlbumWithNullSampler) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestAlbumWithNullSampler.";
+TEST_F(MindDataTestPipeline, TestAlbumWithNullSamplerError) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestAlbumWithNullSamplerError.";
 std::string folder_path = datasets_root_path_ + "/testAlbum/images";
 std::string schema_file = datasets_root_path_ + "/testAlbum/datasetSchema.json";
 std::vector<std::string> column_names = {"image", "label", "id"};
-// Create a Album Dataset
+// Create an Album Dataset
 std::shared_ptr<Dataset> ds = Album(folder_path, schema_file, column_names, true, nullptr);
-// Expect failure: sampler can not be nullptr
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid Album input, sampler cannot be nullptr
+EXPECT_EQ(iter, nullptr);
 }
-TEST_F(MindDataTestPipeline, TestAlbumDuplicateColumnName) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestAlbumDuplicateColumnName.";
+TEST_F(MindDataTestPipeline, TestAlbumDuplicateColumnNameError) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestAlbumDuplicateColumnNameError.";
 std::string folder_path = datasets_root_path_ + "/testAlbum/images";
 std::string schema_file = datasets_root_path_ + "/testAlbum/datasetSchema.json";
 std::vector<std::string> column_names = {"image", "image", "id"};
-// Create a Album Dataset
+// Create an Album Dataset
 std::shared_ptr<Dataset> ds = Album(folder_path, schema_file, column_names, true);
-// Expect failure: duplicate column names
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid Album input, duplicate column names
+EXPECT_EQ(iter, nullptr);
 }
@@ -168,58 +168,84 @@ TEST_F(MindDataTestPipeline, TestCifar100GetDatasetSize) {
 EXPECT_EQ(ds->GetDatasetSize(), 10);
 }
-TEST_F(MindDataTestPipeline, TestCifar100DatasetFail1) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar100DatasetFail1.";
+TEST_F(MindDataTestPipeline, TestCifar100DatasetFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar100DatasetFail.";
 // Create a Cifar100 Dataset
 std::shared_ptr<Dataset> ds = Cifar100("", "all", RandomSampler(false, 10));
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid Cifar100 input
+EXPECT_EQ(iter, nullptr);
 }
-TEST_F(MindDataTestPipeline, TestCifar10DatasetFail1) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetFail1.";
+TEST_F(MindDataTestPipeline, TestCifar10DatasetFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetFail.";
 // Create a Cifar10 Dataset
 std::shared_ptr<Dataset> ds = Cifar10("", "all", RandomSampler(false, 10));
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid Cifar10 input
+EXPECT_EQ(iter, nullptr);
 }
-TEST_F(MindDataTestPipeline, TestCifar10DatasetWithInvalidUsage) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetWithNullSampler.";
+TEST_F(MindDataTestPipeline, TestCifar10DatasetWithInvalidUsageFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetWithNullSamplerFail.";
 // Create a Cifar10 Dataset
 std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
 std::shared_ptr<Dataset> ds = Cifar10(folder_path, "validation");
-// Expect failure: validation is not a valid usage
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid Cifar10 input, validation is not a valid usage
+EXPECT_EQ(iter, nullptr);
 }
-TEST_F(MindDataTestPipeline, TestCifar10DatasetWithNullSampler) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetWithNullSampler.";
+TEST_F(MindDataTestPipeline, TestCifar10DatasetWithNullSamplerFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetWithNullSamplerFail.";
 // Create a Cifar10 Dataset
 std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
 std::shared_ptr<Dataset> ds = Cifar10(folder_path, "all", nullptr);
-// Expect failure: sampler can not be nullptr
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid Cifar10 input, sampler cannot be nullptr
+EXPECT_EQ(iter, nullptr);
 }
-TEST_F(MindDataTestPipeline, TestCifar100DatasetWithNullSampler) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar100DatasetWithNullSampler.";
+TEST_F(MindDataTestPipeline, TestCifar100DatasetWithNullSamplerFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar100DatasetWithNullSamplerFail.";
-// Create a Cifar10 Dataset
+// Create a Cifar100 Dataset
 std::string folder_path = datasets_root_path_ + "/testCifar100Data/";
 std::shared_ptr<Dataset> ds = Cifar100(folder_path, "all", nullptr);
-// Expect failure: sampler can not be nullptr
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid Cifar100 input, sampler cannot be nullptr
+EXPECT_EQ(iter, nullptr);
 }
-TEST_F(MindDataTestPipeline, TestCifar100DatasetWithWrongSampler) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar100DatasetWithWrongSampler.";
+TEST_F(MindDataTestPipeline, TestCifar100DatasetWithWrongSamplerFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar100DatasetWithWrongSamplerFail.";
-// Create a Cifar10 Dataset
+// Create a Cifar100 Dataset
 std::string folder_path = datasets_root_path_ + "/testCifar100Data/";
 std::shared_ptr<Dataset> ds = Cifar100(folder_path, "all", RandomSampler(false, -10));
-// Expect failure: sampler is not construnced correctly
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid Cifar100 input, sampler is not constructed correctly
+EXPECT_EQ(iter, nullptr);
 }
@@ -292,8 +292,8 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetDistribution) {
 iter->Stop();
 }
-TEST_F(MindDataTestPipeline, TestCLUEDatasetException) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetException.";
+TEST_F(MindDataTestPipeline, TestCLUEDatasetFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetFail.";
 // Create a CLUE Dataset
 std::string clue_file = datasets_root_path_ + "/testCLUE/wsc/train.json";
 std::string task = "WSC";
@@ -301,28 +301,60 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetException) {
 std::string invalid_clue_file = "./NotExistFile";
 std::shared_ptr<Dataset> ds0 = CLUE({}, task, usage);
-EXPECT_EQ(ds0, nullptr);
+EXPECT_NE(ds0, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter0 = ds0->CreateIterator();
+// Expect failure: invalid CLUE input
+EXPECT_EQ(iter0, nullptr);
 std::shared_ptr<Dataset> ds1 = CLUE({invalid_clue_file}, task, usage);
-EXPECT_EQ(ds1, nullptr);
+EXPECT_NE(ds1, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter1 = ds1->CreateIterator();
+// Expect failure: invalid CLUE input
+EXPECT_EQ(iter1, nullptr);
 std::shared_ptr<Dataset> ds2 = CLUE({clue_file}, "invalid_task", usage);
-EXPECT_EQ(ds2, nullptr);
+EXPECT_NE(ds2, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter2 = ds2->CreateIterator();
+// Expect failure: invalid CLUE input
+EXPECT_EQ(iter2, nullptr);
 std::shared_ptr<Dataset> ds3 = CLUE({clue_file}, task, "invalid_usage");
-EXPECT_EQ(ds3, nullptr);
+EXPECT_NE(ds3, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter3 = ds3->CreateIterator();
+// Expect failure: invalid CLUE input
+EXPECT_EQ(iter3, nullptr);
 std::shared_ptr<Dataset> ds4 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 2, 2);
-EXPECT_EQ(ds4, nullptr);
+EXPECT_NE(ds4, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter4 = ds4->CreateIterator();
+// Expect failure: invalid CLUE input
+EXPECT_EQ(iter4, nullptr);
 std::shared_ptr<Dataset> ds5 = CLUE({clue_file}, task, usage, -1, ShuffleMode::kGlobal);
-EXPECT_EQ(ds5, nullptr);
+EXPECT_NE(ds5, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter5 = ds5->CreateIterator();
+// Expect failure: invalid CLUE input
+EXPECT_EQ(iter5, nullptr);
 std::shared_ptr<Dataset> ds6 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, -1);
-EXPECT_EQ(ds6, nullptr);
+EXPECT_NE(ds6, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter6 = ds6->CreateIterator();
+// Expect failure: invalid CLUE input
+EXPECT_EQ(iter6, nullptr);
 std::shared_ptr<Dataset> ds7 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 0, -1);
-EXPECT_EQ(ds7, nullptr);
+EXPECT_NE(ds7, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter7 = ds7->CreateIterator();
+// Expect failure: invalid CLUE input
+EXPECT_EQ(iter7, nullptr);
 }
 TEST_F(MindDataTestPipeline, TestCLUEDatasetIFLYTEK) {
@@ -155,22 +155,34 @@ TEST_F(MindDataTestPipeline, TestCocoDetection) {
 iter->Stop();
 }
-TEST_F(MindDataTestPipeline, TestCocoException) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoException.";
+TEST_F(MindDataTestPipeline, TestCocoFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoFail.";
 // Create a Coco Dataset
 std::string folder_path = datasets_root_path_ + "/testCOCO/train";
 std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";
 std::string invalid_folder_path = "./NotExist";
 std::string invalid_annotation_file = "./NotExistFile";
-std::shared_ptr<Dataset> ds = Coco(invalid_folder_path, annotation_file);
-EXPECT_EQ(ds, nullptr);
+std::shared_ptr<Dataset> ds0 = Coco(invalid_folder_path, annotation_file);
+EXPECT_NE(ds0, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter0 = ds0->CreateIterator();
+// Expect failure: invalid COCO input
+EXPECT_EQ(iter0, nullptr);
 std::shared_ptr<Dataset> ds1 = Coco(folder_path, invalid_annotation_file);
-EXPECT_EQ(ds1, nullptr);
+EXPECT_NE(ds1, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter1 = ds1->CreateIterator();
+// Expect failure: invalid COCO input
+EXPECT_EQ(iter1, nullptr);
 std::shared_ptr<Dataset> ds2 = Coco(folder_path, annotation_file, "valid_mode");
-EXPECT_EQ(ds2, nullptr);
+EXPECT_NE(ds2, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter2 = ds2->CreateIterator();
+// Expect failure: invalid COCO input
+EXPECT_EQ(iter2, nullptr);
 }
 TEST_F(MindDataTestPipeline, TestCocoKeypoint) {
@@ -335,13 +347,17 @@ TEST_F(MindDataTestPipeline, TestCocoStuff) {
 iter->Stop();
 }
-TEST_F(MindDataTestPipeline, TestCocoWithNullSampler) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoWithNullSampler.";
+TEST_F(MindDataTestPipeline, TestCocoWithNullSamplerFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoWithNullSamplerFail.";
 // Create a Coco Dataset
 std::string folder_path = datasets_root_path_ + "/testCOCO/train";
 std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";
 std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Detection", false, nullptr);
-// Expect failure: sampler can not be nullptr
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid COCO input, sampler cannot be nullptr
+EXPECT_EQ(iter, nullptr);
 }
@@ -359,8 +359,8 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetHeader) {
 iter->Stop();
 }
-TEST_F(MindDataTestPipeline, TestCSVDatasetException) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCSVDatasetException.";
+TEST_F(MindDataTestPipeline, TestCSVDatasetFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCSVDatasetFail.";
 // Create a CSV Dataset
 std::string file = datasets_root_path_ + "/testCSV/1.csv";
 std::string invalid_csv_file = "./NotExistFile";
@@ -368,27 +368,51 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetException) {
 // Test empty file list
 std::shared_ptr<Dataset> ds0 = CSV({});
-EXPECT_EQ(ds0, nullptr);
+EXPECT_NE(ds0, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter0 = ds0->CreateIterator();
+// Expect failure: invalid CSV input
+EXPECT_EQ(iter0, nullptr);
 // Test invalid file
 std::shared_ptr<Dataset> ds1 = CSV({invalid_csv_file});
-EXPECT_EQ(ds1, nullptr);
+EXPECT_NE(ds1, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter1 = ds1->CreateIterator();
+// Expect failure: invalid CSV input
+EXPECT_EQ(iter1, nullptr);
 // Test invalid num_samples < -1
 std::shared_ptr<Dataset> ds2 = CSV({file}, ',', {}, column_names, -1);
-EXPECT_EQ(ds2, nullptr);
+EXPECT_NE(ds2, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter2 = ds2->CreateIterator();
+// Expect failure: invalid CSV input
+EXPECT_EQ(iter2, nullptr);
 // Test invalid num_shards < 1
 std::shared_ptr<Dataset> ds3 = CSV({file}, ',', {}, column_names, 0, ShuffleMode::kFalse, 0);
-EXPECT_EQ(ds3, nullptr);
+EXPECT_NE(ds3, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter3 = ds3->CreateIterator();
+// Expect failure: invalid CSV input
+EXPECT_EQ(iter3, nullptr);
 // Test invalid shard_id >= num_shards
 std::shared_ptr<Dataset> ds4 = CSV({file}, ',', {}, column_names, 0, ShuffleMode::kFalse, 2, 2);
-EXPECT_EQ(ds4, nullptr);
+EXPECT_NE(ds4, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter4 = ds4->CreateIterator();
+// Expect failure: invalid CSV input
+EXPECT_EQ(iter4, nullptr);
 // Test invalid field_delim
 std::shared_ptr<Dataset> ds5 = CSV({file}, '"', {}, column_names);
-EXPECT_EQ(ds5, nullptr);
+EXPECT_NE(ds5, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter5 = ds5->CreateIterator();
+// Expect failure: invalid CSV input
+EXPECT_EQ(iter5, nullptr);
 }
 TEST_F(MindDataTestPipeline, TestCSVDatasetShuffleFilesA) {
@@ -555,13 +579,17 @@ TEST_F(MindDataTestPipeline, TestCSVDatasetShuffleGlobal) {
 GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
 }
-TEST_F(MindDataTestPipeline, TestCSVDatasetDuplicateColumnName) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCSVDatasetDuplicateColumnName.";
+TEST_F(MindDataTestPipeline, TestCSVDatasetDuplicateColumnNameFail) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCSVDatasetDuplicateColumnNameFail.";
 // Create a CSVDataset, with single CSV file
 std::string train_file = datasets_root_path_ + "/testCSV/1.csv";
 std::vector<std::string> column_names = {"col1", "col1", "col3", "col4"};
 std::shared_ptr<Dataset> ds = CSV({train_file}, ',', {}, column_names, 0, ShuffleMode::kFalse);
-// Expect failure: duplicate column names
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid CSV input, duplicate column names
+EXPECT_EQ(iter, nullptr);
 }
@@ -227,24 +227,43 @@ TEST_F(MindDataTestPipeline, TestManifestError) {
 MS_LOG(INFO) << "Doing MindDataTestPipeline-TestManifestError.";
 std::string file_path = datasets_root_path_ + "/testManifestData/cpp.json";
-// Create a Manifest Dataset with not exist file
+// Create a Manifest Dataset with non-existing file
 std::shared_ptr<Dataset> ds0 = Manifest("NotExistFile", "train");
-EXPECT_EQ(ds0, nullptr);
+EXPECT_NE(ds0, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter0 = ds0->CreateIterator();
+// Expect failure: invalid Manifest input
+EXPECT_EQ(iter0, nullptr);
 // Create a Manifest Dataset with invalid usage
 std::shared_ptr<Dataset> ds1 = Manifest(file_path, "invalid_usage");
-EXPECT_EQ(ds1, nullptr);
+EXPECT_NE(ds1, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter1 = ds1->CreateIterator();
+// Expect failure: invalid Manifest input
+EXPECT_EQ(iter1, nullptr);
 // Create a Manifest Dataset with invalid string
 std::shared_ptr<Dataset> ds2 = Manifest(":*?\"<>|`&;'", "train");
-EXPECT_EQ(ds2, nullptr);
+EXPECT_NE(ds2, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter2 = ds2->CreateIterator();
+// Expect failure: invalid Manifest input
+EXPECT_EQ(iter2, nullptr);
 }
-TEST_F(MindDataTestPipeline, TestManifestWithNullSampler) {
-MS_LOG(INFO) << "Doing MindDataTestPipeline-TestManifestWithNullSampler.";
+TEST_F(MindDataTestPipeline, TestManifestWithNullSamplerError) {
+MS_LOG(INFO) << "Doing MindDataTestPipeline-TestManifestWithNullSamplerError.";
 std::string file_path = datasets_root_path_ + "/testManifestData/cpp.json";
 // Create a Manifest Dataset
 std::shared_ptr<Dataset> ds = Manifest(file_path, "train", nullptr);
-// Expect failure: sampler can not be nullptr
-EXPECT_EQ(ds, nullptr);
+EXPECT_NE(ds, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter = ds->CreateIterator();
+// Expect failure: invalid Manifest input, sampler cannot be nullptr
+EXPECT_EQ(iter, nullptr);
 }
@@ -335,19 +335,34 @@ TEST_F(MindDataTestPipeline, TestMindDataFail1) {
 // Create a MindData Dataset with incorrect pattern
 std::string file_path1 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/apple.mindrecord0";
 std::shared_ptr<Dataset> ds1 = MindData(file_path1);
-EXPECT_EQ(ds1, nullptr);
+EXPECT_NE(ds1, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter1 = ds1->CreateIterator();
+// Expect failure: invalid MindData input with incorrect pattern
+EXPECT_EQ(iter1, nullptr);
 // Create a MindData Dataset with incorrect file path
 std::string file_path2 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/apple.mindrecord0";
 std::vector<std::string> file_list = {file_path2};
 std::shared_ptr<Dataset> ds2 = MindData(file_list);
-EXPECT_EQ(ds2, nullptr);
+EXPECT_NE(ds2, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter2 = ds2->CreateIterator();
+// Expect failure: invalid MindData input with incorrect file path
+EXPECT_EQ(iter2, nullptr);
 // Create a MindData Dataset with incorrect file path
 // ATTENTION: file_path3 is not a pattern to search for ".mindrecord*"
 std::string file_path3 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord";
 std::shared_ptr<Dataset> ds3 = MindData(file_path3);
-EXPECT_EQ(ds3, nullptr);
+EXPECT_NE(ds3, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter3 = ds3->CreateIterator();
+// Expect failure: invalid MindData input with incorrect file path
+EXPECT_EQ(iter3, nullptr);
 }
 TEST_F(MindDataTestPipeline, TestMindDataFail2) {
@@ -356,12 +371,22 @@ TEST_F(MindDataTestPipeline, TestMindDataFail2) {
 // Create a MindData Dataset with incorrect column name
 std::string file_path1 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
 std::shared_ptr<Dataset> ds1 = MindData(file_path1, {""});
-EXPECT_EQ(ds1, nullptr);
+EXPECT_NE(ds1, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter1 = ds1->CreateIterator();
+// Expect failure: invalid MindData input with incorrect column name
+EXPECT_EQ(iter1, nullptr);
 // Create a MindData Dataset with duplicate column name
 std::string file_path2 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
 std::shared_ptr<Dataset> ds2 = MindData(file_path2, {"label", "label"});
-EXPECT_EQ(ds2, nullptr);
+EXPECT_NE(ds2, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter2 = ds2->CreateIterator();
+// Expect failure: invalid MindData input with duplicate column name
+EXPECT_EQ(iter2, nullptr);
 // Create a MindData Dataset with unexpected column name
 std::string file_path3 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
@@ -371,8 +396,9 @@ TEST_F(MindDataTestPipeline, TestMindDataFail2) {
 // Create an iterator over the result of the above dataset
 // This will trigger the creation of the Execution Tree and launch it.
-std::shared_ptr<Iterator> iter = ds3->CreateIterator();
-EXPECT_EQ(iter, nullptr);
+std::shared_ptr<Iterator> iter3 = ds3->CreateIterator();
+// Expect failure: invalid MindData input with unexpected column name
+EXPECT_EQ(iter3, nullptr);
 }
 TEST_F(MindDataTestPipeline, TestMindDataFail3) {
@@ -384,14 +410,19 @@ TEST_F(MindDataTestPipeline, TestMindDataFail3) {
 EXPECT_NE(ds1, nullptr);
 // Create an iterator over the result of the above dataset
-// This will trigger the creation of the Execution Tree and launch it.
 std::shared_ptr<Iterator> iter1 = ds1->CreateIterator();
+// Expect failure: invalid MindData input with unsupported sampler
 EXPECT_EQ(iter1, nullptr);
 // Create a MindData Dataset with incorrect sampler
 std::string file_path2 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
 std::shared_ptr<Dataset> ds2 = MindData(file_path2, {}, nullptr);
-EXPECT_EQ(ds2, nullptr);
+EXPECT_NE(ds2, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter2 = ds2->CreateIterator();
+// Expect failure: invalid MindData input with incorrect sampler
+EXPECT_EQ(iter2, nullptr);
 }
 TEST_F(MindDataTestPipeline, TestMindDataFail4) {
@@ -400,11 +431,14 @@ TEST_F(MindDataTestPipeline, TestMindDataFail4) {
 // Create a MindData Dataset
 std::string file_path1 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
 std::shared_ptr<Dataset> ds1 = MindData(file_path1, {}, RandomSampler(), nullptr, 2);
-// num_padded is specified but padded_sample is not
-EXPECT_EQ(ds1, nullptr);
+EXPECT_NE(ds1, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter1 = ds1->CreateIterator();
+// Expect failure: invalid MindData input, num_padded is specified but padded_sample is not
+EXPECT_EQ(iter1, nullptr);
-// Create paded sample for MindDataset
+// Create padded sample for MindDataset
 auto pad = nlohmann::json::object();
 pad["file_name"] = "1.jpg";
 pad["label"] = 123456;
@@ -412,18 +446,24 @@ TEST_F(MindDataTestPipeline, TestMindDataFail4) {
 // Create a MindData Dataset
 std::string file_path2 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
 std::shared_ptr<Dataset> ds2 = MindData(file_path2, {"label"}, RandomSampler(), pad, -2);
-// num_padded must be greater than or equal to zero
-EXPECT_EQ(ds2, nullptr);
+EXPECT_NE(ds2, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter2 = ds2->CreateIterator();
+// Expect failure: invalid MindData input, num_padded is not greater than or equal to zero
+EXPECT_EQ(iter2, nullptr);
 // Create a MindData Dataset
 std::string file_path3 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
 std::shared_ptr<Dataset> ds3 = MindData(file_path3, {}, RandomSampler(), pad, 1);
-// padded_sample is specified and requires columns_list as well
-EXPECT_EQ(ds3, nullptr);
+EXPECT_NE(ds3, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter3 = ds3->CreateIterator();
+// Expect failure: invalid MindData input, padded_sample is specified but requires columns_list as well
+EXPECT_EQ(iter3, nullptr);
-// Create paded sample with unmatch column name
+// Create padded sample with unmatched column name
 auto pad2 = nlohmann::json::object();
 pad2["a"] = "1.jpg";
 pad2["b"] = 123456;
@@ -431,7 +471,10 @@ TEST_F(MindDataTestPipeline, TestMindDataFail4) {
 // Create a MindData Dataset
 std::string file_path4 = datasets_root_path_ + "/../mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0";
 std::shared_ptr<Dataset> ds4 = MindData(file_path4, {"file_name", "label"}, RandomSampler(), pad2, 1);
-// columns_list does not match any column in padded_sample
-EXPECT_EQ(ds4, nullptr);
+EXPECT_NE(ds4, nullptr);
+// Create an iterator over the result of the above dataset
+std::shared_ptr<Iterator> iter4 = ds4->CreateIterator();
+// Expect failure: invalid MindData input, columns_list does not match any column in padded_sample
+EXPECT_EQ(iter4, nullptr);
 }
| @@ -178,7 +178,12 @@ TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail1) { | |||||
| // Create a BucketBatchByLength operation on ds | // Create a BucketBatchByLength operation on ds | ||||
| ds = ds->BucketBatchByLength({"image"}, {}, {1}); | ds = ds->BucketBatchByLength({"image"}, {}, {1}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid Op input | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail2) { | TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail2) { | ||||
| @@ -193,7 +198,12 @@ TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail2) { | |||||
| // Create a BucketBatchByLength operation on ds | // Create a BucketBatchByLength operation on ds | ||||
| ds = ds->BucketBatchByLength({"image"}, {1}, {}); | ds = ds->BucketBatchByLength({"image"}, {1}, {}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid BucketBatchByLength input, bucket_batch_sizes cannot be empty | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail3) { | TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail3) { | ||||
| @@ -208,7 +218,12 @@ TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail3) { | |||||
| // Create a BucketBatchByLength operation on ds | // Create a BucketBatchByLength operation on ds | ||||
| ds = ds->BucketBatchByLength({"image"}, {-1, 1}, {1, 2, 3}); | ds = ds->BucketBatchByLength({"image"}, {-1, 1}, {1, 2, 3}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid BucketBatchByLength input, bucket_boundaries must only contain positive values | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail4) { | TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail4) { | ||||
| @@ -223,7 +238,12 @@ TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail4) { | |||||
| // Create a BucketBatchByLength operation on ds | // Create a BucketBatchByLength operation on ds | ||||
| ds = ds->BucketBatchByLength({"image"}, {2, 2}, {1, 2, 3}); | ds = ds->BucketBatchByLength({"image"}, {2, 2}, {1, 2, 3}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid BucketBatchByLength input, bucket_boundaries must be strictly increasing | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail5) { | TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail5) { | ||||
| @@ -238,7 +258,12 @@ TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail5) { | |||||
| // Create a BucketBatchByLength operation on ds | // Create a BucketBatchByLength operation on ds | ||||
| ds = ds->BucketBatchByLength({"image"}, {1, 2}, {1, 2}); | ds = ds->BucketBatchByLength({"image"}, {1, 2}, {1, 2}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid BucketBatchByLength input, bucket_batch_sizes must contain one more element than bucket_boundaries | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail6) { | TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail6) { | ||||
| @@ -252,7 +277,12 @@ TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail6) { | |||||
| EXPECT_NE(ds, nullptr); | EXPECT_NE(ds, nullptr); | ||||
| // Create a BucketBatchByLength operation on ds | // Create a BucketBatchByLength operation on ds | ||||
| ds = ds->BucketBatchByLength({"image"}, {1, 2}, {1, -2, 3}); | ds = ds->BucketBatchByLength({"image"}, {1, 2}, {1, -2, 3}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid BucketBatchByLength input, bucket_batch_sizes must only contain positive values | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail7) { | TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail7) { | ||||
| @@ -267,7 +297,12 @@ TEST_F(MindDataTestPipeline, TestBucketBatchByLengthFail7) { | |||||
| // Create a BucketBatchByLength operation on ds | // Create a BucketBatchByLength operation on ds | ||||
| ds = ds->BucketBatchByLength({"image", "label"}, {1, 2}, {1, 2, 3}); | ds = ds->BucketBatchByLength({"image", "label"}, {1, 2}, {1, 2, 3}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid BucketBatchByLength input, only one column name is allowed without an element_length_function | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestConcatFail1) { | TEST_F(MindDataTestPipeline, TestConcatFail1) { | ||||
| @@ -314,7 +349,12 @@ TEST_F(MindDataTestPipeline, TestConcatFail2) { | |||||
| // Create a Concat operation on the ds | // Create a Concat operation on the ds | ||||
| // Input dataset to concat is empty | // Input dataset to concat is empty | ||||
| ds = ds->Concat({}); | ds = ds->Concat({}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid Concat input, list of datasets to concat is empty | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestConcatSuccess) { | TEST_F(MindDataTestPipeline, TestConcatSuccess) { | ||||
| @@ -564,8 +604,8 @@ TEST_F(MindDataTestPipeline, TestProjectMap) { | |||||
| iter->Stop(); | iter->Stop(); | ||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestProjectDuplicateColumn) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestProjectDuplicateColumn."; | |||||
| TEST_F(MindDataTestPipeline, TestProjectDuplicateColumnFail) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestProjectDuplicateColumnFail."; | |||||
| // Create an ImageFolder Dataset | // Create an ImageFolder Dataset | ||||
| std::string folder_path = datasets_root_path_ + "/testPK/data/"; | std::string folder_path = datasets_root_path_ + "/testPK/data/"; | ||||
| @@ -583,13 +623,18 @@ TEST_F(MindDataTestPipeline, TestProjectDuplicateColumn) { | |||||
| // Create a Project operation on ds | // Create a Project operation on ds | ||||
| std::vector<std::string> column_project = {"image", "image"}; | std::vector<std::string> column_project = {"image", "image"}; | ||||
| // Expect failure: duplicate project column name | |||||
| // Create a Project operation on ds | |||||
| ds = ds->Project(column_project); | ds = ds->Project(column_project); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
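| // The duplicate column name is not rejected here; it should surface when the pipeline is built below | |||||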
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: duplicate project op column name | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestMapDuplicateColumn) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMapDuplicateColumn."; | |||||
| TEST_F(MindDataTestPipeline, TestMapDuplicateColumnFail) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMapDuplicateColumnFail."; | |||||
| // Create an ImageFolder Dataset | // Create an ImageFolder Dataset | ||||
| std::string folder_path = datasets_root_path_ + "/testPK/data/"; | std::string folder_path = datasets_root_path_ + "/testPK/data/"; | ||||
| @@ -602,18 +647,30 @@ TEST_F(MindDataTestPipeline, TestMapDuplicateColumn) { | |||||
| // Create a Map operation on ds | // Create a Map operation on ds | ||||
| auto ds1 = ds->Map({random_vertical_flip_op}, {"image", "image"}, {}, {}); | auto ds1 = ds->Map({random_vertical_flip_op}, {"image", "image"}, {}, {}); | ||||
| // Expect failure: duplicate input column name | |||||
| EXPECT_EQ(ds1, nullptr); | |||||
| EXPECT_NE(ds1, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter1 = ds1->CreateIterator(); | |||||
| // Expect failure: duplicate Map op input column name | |||||
| EXPECT_EQ(iter1, nullptr); | |||||
| // Create a Map operation on ds | // Create a Map operation on ds | ||||
| auto ds2 = ds->Map({random_vertical_flip_op}, {}, {"label", "label"}, {}); | auto ds2 = ds->Map({random_vertical_flip_op}, {}, {"label", "label"}, {}); | ||||
| // Expect failure: duplicate output column name | |||||
| EXPECT_EQ(ds2, nullptr); | |||||
| EXPECT_NE(ds2, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter2 = ds2->CreateIterator(); | |||||
| // Expect failure: duplicate Map op output column name | |||||
| EXPECT_EQ(iter2, nullptr); | |||||
| // Create a Map operation on ds | // Create a Map operation on ds | ||||
| auto ds3 = ds->Map({random_vertical_flip_op}, {}, {}, {"image", "image"}); | auto ds3 = ds->Map({random_vertical_flip_op}, {}, {}, {"image", "image"}); | ||||
| // Expect failure: duplicate project column name | |||||
| EXPECT_EQ(ds3, nullptr); | |||||
| EXPECT_NE(ds3, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter3 = ds3->CreateIterator(); | |||||
| // Expect failure: duplicate Map op project column name | |||||
| EXPECT_EQ(iter3, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestProjectMapAutoInjection) { | TEST_F(MindDataTestPipeline, TestProjectMapAutoInjection) { | ||||
| @@ -683,7 +740,12 @@ TEST_F(MindDataTestPipeline, TestRenameFail1) { | |||||
| // Create a Rename operation on ds | // Create a Rename operation on ds | ||||
| ds = ds->Rename({"image", "label"}, {"col2"}); | ds = ds->Rename({"image", "label"}, {"col2"}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid Rename input, input and output column lists have mismatched sizes | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestRenameFail2) { | TEST_F(MindDataTestPipeline, TestRenameFail2) { | ||||
| @@ -697,7 +759,12 @@ TEST_F(MindDataTestPipeline, TestRenameFail2) { | |||||
| // Create a Rename operation on ds | // Create a Rename operation on ds | ||||
| ds = ds->Rename({"image", "label"}, {"col2", ""}); | ds = ds->Rename({"image", "label"}, {"col2", ""}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid Rename input, output column name cannot be an empty string | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestRenameFail3) { | TEST_F(MindDataTestPipeline, TestRenameFail3) { | ||||
| @@ -711,11 +778,21 @@ TEST_F(MindDataTestPipeline, TestRenameFail3) { | |||||
| // Create a Rename operation on ds | // Create a Rename operation on ds | ||||
| auto ds1 = ds->Rename({"image", "image"}, {"col1", "col2"}); | auto ds1 = ds->Rename({"image", "image"}, {"col1", "col2"}); | ||||
| EXPECT_EQ(ds1, nullptr); | |||||
| EXPECT_NE(ds1, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter1 = ds1->CreateIterator(); | |||||
| // Expect failure: invalid Rename input, duplicate input column name | |||||
| EXPECT_EQ(iter1, nullptr); | |||||
| // Create a Rename operation on ds | // Create a Rename operation on ds | ||||
| auto ds2 = ds->Rename({"image", "label"}, {"col1", "col1"}); | auto ds2 = ds->Rename({"image", "label"}, {"col1", "col1"}); | ||||
| EXPECT_EQ(ds2, nullptr); | |||||
| EXPECT_NE(ds2, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter2 = ds2->CreateIterator(); | |||||
| // Expect failure: invalid Rename input, duplicate output column name | |||||
| EXPECT_EQ(iter2, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestRenameSuccess) { | TEST_F(MindDataTestPipeline, TestRenameSuccess) { | ||||
| @@ -861,7 +938,12 @@ TEST_F(MindDataTestPipeline, TestRepeatFail1) { | |||||
| // Create a Repeat operation on ds | // Create a Repeat operation on ds | ||||
| int32_t repeat_num = 0; | int32_t repeat_num = 0; | ||||
| ds = ds->Repeat(repeat_num); | ds = ds->Repeat(repeat_num); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid Repeat input, repeat_num of 0 is not supported | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestRepeatFail2) { | TEST_F(MindDataTestPipeline, TestRepeatFail2) { | ||||
| @@ -876,7 +958,12 @@ TEST_F(MindDataTestPipeline, TestRepeatFail2) { | |||||
| // Create a Repeat operation on ds | // Create a Repeat operation on ds | ||||
| int32_t repeat_num = -2; | int32_t repeat_num = -2; | ||||
| ds = ds->Repeat(repeat_num); | ds = ds->Repeat(repeat_num); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid Repeat input, repeat_num of -2 is not supported | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestShuffleDataset) { | TEST_F(MindDataTestPipeline, TestShuffleDataset) { | ||||
| @@ -990,8 +1077,12 @@ TEST_F(MindDataTestPipeline, TestSkipDatasetError1) { | |||||
| // Create a Skip operation on ds with invalid count input | // Create a Skip operation on ds with invalid count input | ||||
| int32_t count = -1; | int32_t count = -1; | ||||
| ds = ds->Skip(count); | ds = ds->Skip(count); | ||||
| // Expect nullptr for invalid input skip_count | |||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid Skip input, skip count cannot be negative | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestTakeDatasetDefault) { | TEST_F(MindDataTestPipeline, TestTakeDatasetDefault) { | ||||
| @@ -1057,14 +1148,22 @@ TEST_F(MindDataTestPipeline, TestTakeDatasetError1) { | |||||
| // Create a Take operation on ds with invalid count input | // Create a Take operation on ds with invalid count input | ||||
| int32_t count = -5; | int32_t count = -5; | ||||
| auto ds1 = ds->Take(count); | auto ds1 = ds->Take(count); | ||||
| // Expect nullptr for invalid input take_count | |||||
| EXPECT_EQ(ds1, nullptr); | |||||
| EXPECT_NE(ds1, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds1->CreateIterator(); | |||||
| // Expect failure: invalid Take input, take_count of -5 is not supported | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| // Create a Take operation on ds with invalid count input | // Create a Take operation on ds with invalid count input | ||||
| count = 0; | count = 0; | ||||
| auto ds2 = ds->Take(count); | auto ds2 = ds->Take(count); | ||||
| // Expect nullptr for invalid input take_count | |||||
| EXPECT_EQ(ds2, nullptr); | |||||
| EXPECT_NE(ds2, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| iter = ds2->CreateIterator(); | |||||
| // Expect failure: invalid Take input, take_count of 0 is not supported | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestTakeDatasetNormal) { | TEST_F(MindDataTestPipeline, TestTakeDatasetNormal) { | ||||
| @@ -1197,7 +1296,12 @@ TEST_F(MindDataTestPipeline, TestZipFail2) { | |||||
| // Create a Zip operation on the datasets | // Create a Zip operation on the datasets | ||||
| // Input dataset to zip is empty | // Input dataset to zip is empty | ||||
| ds = Zip({}); | ds = Zip({}); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid Zip input, list of datasets to zip is empty | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestZipSuccess) { | TEST_F(MindDataTestPipeline, TestZipSuccess) { | ||||
| @@ -125,13 +125,16 @@ TEST_F(MindDataTestPipeline, TestTextFileGetDatasetSize) { | |||||
| TEST_F(MindDataTestPipeline, TestTextFileDatasetFail1) { | TEST_F(MindDataTestPipeline, TestTextFileDatasetFail1) { | ||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail1."; | MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail1."; | ||||
| // Attempt to create a TextFile Dataset | |||||
| // Create a TextFile Dataset | |||||
| // with invalid samplers=-1 | // with invalid samplers=-1 | ||||
| std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; | std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; | ||||
| std::shared_ptr<Dataset> ds = TextFile({tf_file1}, -1); | std::shared_ptr<Dataset> ds = TextFile({tf_file1}, -1); | ||||
| EXPECT_NE(ds, nullptr); | |||||
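| // The negative num_samples is not checked at construction time; it is reported when the iterator is created | |||||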
| // Expect failure: Number of samples cannot be negative | |||||
| EXPECT_EQ(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset. | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: TextFile number of samples cannot be negative | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestTextFileDatasetFail2) { | TEST_F(MindDataTestPipeline, TestTextFileDatasetFail2) { | ||||
| @@ -140,68 +143,86 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetFail2) { | |||||
| // Attempt to create a TextFile Dataset | // Attempt to create a TextFile Dataset | ||||
| // with wrongful empty dataset_files input | // with wrongful empty dataset_files input | ||||
| std::shared_ptr<Dataset> ds = TextFile({}); | std::shared_ptr<Dataset> ds = TextFile({}); | ||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset. | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: dataset_files is not specified | // Expect failure: dataset_files is not specified | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestTextFileDatasetFail3) { | TEST_F(MindDataTestPipeline, TestTextFileDatasetFail3) { | ||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail3."; | MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail3."; | ||||
| // Attempt to create a TextFile Dataset | |||||
| // Create a TextFile Dataset | |||||
| // with non-existent dataset_files input | // with non-existent dataset_files input | ||||
| std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; | std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; | ||||
| std::shared_ptr<Dataset> ds = TextFile({tf_file1, "notexist.txt"}, 0, ShuffleMode::kFalse); | std::shared_ptr<Dataset> ds = TextFile({tf_file1, "notexist.txt"}, 0, ShuffleMode::kFalse); | ||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset. | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: specified dataset_files does not exist | // Expect failure: specified dataset_files does not exist | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestTextFileDatasetFail4) { | TEST_F(MindDataTestPipeline, TestTextFileDatasetFail4) { | ||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail4."; | MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail4."; | ||||
| // Attempt to create a TextFile Dataset | |||||
| // Create a TextFile Dataset | |||||
| // with empty string dataset_files input | // with empty string dataset_files input | ||||
| std::shared_ptr<Dataset> ds = TextFile({""}, 0, ShuffleMode::kFiles); | std::shared_ptr<Dataset> ds = TextFile({""}, 0, ShuffleMode::kFiles); | ||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset. | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: specified dataset_files does not exist | // Expect failure: specified dataset_files does not exist | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestTextFileDatasetFail5) { | TEST_F(MindDataTestPipeline, TestTextFileDatasetFail5) { | ||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail5."; | MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail5."; | ||||
| // Attempt to create a TextFile Dataset | |||||
| // Create a TextFile Dataset | |||||
| // with invalid num_shards=0 value | // with invalid num_shards=0 value | ||||
| std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; | std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; | ||||
| std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 1, ShuffleMode::kFalse, 0); | std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 1, ShuffleMode::kFalse, 0); | ||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset. | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: Number of shards cannot be <=0 | // Expect failure: Number of shards cannot be <=0 | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestTextFileDatasetFail6) { | TEST_F(MindDataTestPipeline, TestTextFileDatasetFail6) { | ||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail6."; | MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail6."; | ||||
| // Attempt to create a TextFile Dataset | |||||
| // Create a TextFile Dataset | |||||
| // with invalid shard_id=-1 value | // with invalid shard_id=-1 value | ||||
| std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; | std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; | ||||
| std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kFiles, -1); | std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kFiles, -1); | ||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset. | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: shard_id cannot be negative | // Expect failure: shard_id cannot be negative | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestTextFileDatasetFail7) { | TEST_F(MindDataTestPipeline, TestTextFileDatasetFail7) { | ||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail7."; | MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail7."; | ||||
| // Attempt to create a TextFile Dataset | |||||
| // Create a TextFile Dataset | |||||
| // with invalid shard_id=2 and num_shards=2 combination | // with invalid shard_id=2 and num_shards=2 combination | ||||
| std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; | std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; | ||||
| std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kGlobal, 2, 2); | std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kGlobal, 2, 2); | ||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset. | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: Cannot have shard_id >= num_shards | // Expect failure: Cannot have shard_id >= num_shards | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse1A) { | TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse1A) { | ||||
| @@ -148,18 +148,26 @@ TEST_F(MindDataTestPipeline, TestVOCDetection) { | |||||
| iter->Stop(); | iter->Stop(); | ||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestVOCInvalidTaskOrMode) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCInvalidTaskOrMode."; | |||||
| TEST_F(MindDataTestPipeline, TestVOCInvalidTaskOrModeError1) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCInvalidTaskOrModeError1."; | |||||
| // Create a VOC Dataset | // Create a VOC Dataset | ||||
| std::string folder_path = datasets_root_path_ + "/testVOC2012_2"; | std::string folder_path = datasets_root_path_ + "/testVOC2012_2"; | ||||
| std::shared_ptr<Dataset> ds_1 = VOC(folder_path, "Classification", "train", {}, false, SequentialSampler(0, 3)); | |||||
| // Expect nullptr for invalid task | |||||
| EXPECT_EQ(ds_1, nullptr); | |||||
| std::shared_ptr<Dataset> ds1 = VOC(folder_path, "Classification", "train", {}, false, SequentialSampler(0, 3)); | |||||
| EXPECT_NE(ds1, nullptr); | |||||
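| // The unsupported task is not rejected here; CreateIterator() is expected to fail instead | |||||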
| std::shared_ptr<Dataset> ds_2 = VOC(folder_path, "Segmentation", "validation", {}, false, RandomSampler(false, 4)); | |||||
| // Expect nullptr for invalid mode | |||||
| EXPECT_EQ(ds_2, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter1 = ds1->CreateIterator(); | |||||
| // Expect failure: invalid VOC input, invalid task | |||||
| EXPECT_EQ(iter1, nullptr); | |||||
| std::shared_ptr<Dataset> ds2 = VOC(folder_path, "Segmentation", "validation", {}, false, RandomSampler(false, 4)); | |||||
| EXPECT_NE(ds2, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter2 = ds2->CreateIterator(); | |||||
| // Expect failure: invalid VOC input, invalid mode | |||||
| EXPECT_EQ(iter2, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestVOCSegmentation) { | TEST_F(MindDataTestPipeline, TestVOCSegmentation) { | ||||
| @@ -212,25 +220,32 @@ TEST_F(MindDataTestPipeline, TestVOCSegmentation) { | |||||
| iter->Stop(); | iter->Stop(); | ||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestVOCSegmentationError1) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCSegmentationError1."; | |||||
| TEST_F(MindDataTestPipeline, TestVOCSegmentationError2) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCSegmentationError2."; | |||||
| // Create a VOC Dataset | // Create a VOC Dataset | ||||
| std::map<std::string, int32_t> class_index; | std::map<std::string, int32_t> class_index; | ||||
| class_index["car"] = 0; | class_index["car"] = 0; | ||||
| std::string folder_path = datasets_root_path_ + "/testVOC2012_2"; | std::string folder_path = datasets_root_path_ + "/testVOC2012_2"; | ||||
| std::shared_ptr<Dataset> ds = VOC(folder_path, "Segmentation", "train", class_index, false, RandomSampler(false, 6)); | std::shared_ptr<Dataset> ds = VOC(folder_path, "Segmentation", "train", class_index, false, RandomSampler(false, 6)); | ||||
| EXPECT_NE(ds, nullptr); | |||||
| // Expect nullptr for segmentation task with class_index | |||||
| EXPECT_EQ(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid VOC input, segmentation task with class_index | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestVOCWithNullSampler) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCWithNullSampler."; | |||||
| TEST_F(MindDataTestPipeline, TestVOCWithNullSamplerError3) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCWithNullSamplerError3."; | |||||
| // Create a VOC Dataset | // Create a VOC Dataset | ||||
| std::string folder_path = datasets_root_path_ + "/testVOC2012_2"; | std::string folder_path = datasets_root_path_ + "/testVOC2012_2"; | ||||
| std::shared_ptr<Dataset> ds = VOC(folder_path, "Segmentation", "train", {}, false, nullptr); | std::shared_ptr<Dataset> ds = VOC(folder_path, "Segmentation", "train", {}, false, nullptr); | ||||
| // Expect failure: sampler can not be nullptr | |||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid VOC input, sampler cannot be nullptr | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| @@ -167,39 +167,61 @@ TEST_F(MindDataTestPipeline, TestCelebAGetDatasetSize) { | |||||
| EXPECT_EQ(ds->GetDatasetSize(), 1); | EXPECT_EQ(ds->GetDatasetSize(), 1); | ||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestCelebAException) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCelebAException."; | |||||
| TEST_F(MindDataTestPipeline, TestCelebAError) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCelebAError."; | |||||
| // Create a CelebA Dataset | |||||
| std::string folder_path = datasets_root_path_ + "/testCelebAData/"; | std::string folder_path = datasets_root_path_ + "/testCelebAData/"; | ||||
| std::string invalid_folder_path = "./testNotExist"; | std::string invalid_folder_path = "./testNotExist"; | ||||
| std::string invalid_dataset_type = "invalid_type"; | std::string invalid_dataset_type = "invalid_type"; | ||||
| std::shared_ptr<Dataset> ds = CelebA(invalid_folder_path); | |||||
| EXPECT_EQ(ds, nullptr); | |||||
| std::shared_ptr<Dataset> ds1 = CelebA(folder_path, invalid_dataset_type); | |||||
| EXPECT_EQ(ds1, nullptr); | |||||
| // Create a CelebA Dataset | |||||
| std::shared_ptr<Dataset> ds1 = CelebA(invalid_folder_path); | |||||
| EXPECT_NE(ds1, nullptr); | |||||
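| // The non-existent dataset path is only verified when the iterator is created | |||||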
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter1 = ds1->CreateIterator(); | |||||
| // Expect failure: invalid CelebA input, invalid dataset path | |||||
| EXPECT_EQ(iter1, nullptr); | |||||
| // Create a CelebA Dataset | |||||
| std::shared_ptr<Dataset> ds2 = CelebA(folder_path, invalid_dataset_type); | |||||
| EXPECT_NE(ds2, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter2 = ds2->CreateIterator(); | |||||
| // Expect failure: invalid CelebA input, invalid dataset type | |||||
| EXPECT_EQ(iter2, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestCelebADatasetWithNullSampler) { | |||||
| TEST_F(MindDataTestPipeline, TestCelebADatasetWithNullSamplerError) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCelebADataset."; | MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCelebADataset."; | ||||
| // Create a CelebA Dataset | // Create a CelebA Dataset | ||||
| std::string folder_path = datasets_root_path_ + "/testCelebAData/"; | std::string folder_path = datasets_root_path_ + "/testCelebAData/"; | ||||
| std::shared_ptr<Dataset> ds = CelebA(folder_path, "all", nullptr, false, {}); | std::shared_ptr<Dataset> ds = CelebA(folder_path, "all", nullptr, false, {}); | ||||
| // Expect failure: sampler can not be nullptr | |||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid CelebA input, sampler cannot be nullptr | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestImageFolderWithWrongDatasetDir) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderWithWrongDatasetDir."; | |||||
| TEST_F(MindDataTestPipeline, TestImageFolderWithWrongDatasetDirFail) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderWithWrongDatasetDirFail."; | |||||
| // Create an ImageFolder Dataset | // Create an ImageFolder Dataset | ||||
| std::shared_ptr<Dataset> ds = ImageFolder("", true, nullptr); | std::shared_ptr<Dataset> ds = ImageFolder("", true, nullptr); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid ImageFolder input, empty dataset directory | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestImageFolderFailWithWrongExtension) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFailWithWrongExtension."; | |||||
| TEST_F(MindDataTestPipeline, TestImageFolderFailWithWrongExtensionFail) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFailWithWrongExtensionFail."; | |||||
| // Create an ImageFolder Dataset | // Create an ImageFolder Dataset | ||||
| std::string folder_path = datasets_root_path_ + "/testPK/data/"; | std::string folder_path = datasets_root_path_ + "/testPK/data/"; | ||||
| @@ -214,7 +236,7 @@ TEST_F(MindDataTestPipeline, TestImageFolderFailWithWrongExtension) { | |||||
| // Iterate the dataset and get each row | // Iterate the dataset and get each row | ||||
| std::unordered_map<std::string, std::shared_ptr<Tensor>> row; | std::unordered_map<std::string, std::shared_ptr<Tensor>> row; | ||||
| iter->GetNextRow(&row); | iter->GetNextRow(&row); | ||||
| // Expect no data: can not find files with specified extension | |||||
| // Expect no data: cannot find files with specified extension | |||||
| EXPECT_EQ(row.size(), 0); | EXPECT_EQ(row.size(), 0); | ||||
| // Manually terminate the pipeline | // Manually terminate the pipeline | ||||
| @@ -236,24 +258,32 @@ TEST_F(MindDataTestPipeline, TestImageFolderGetters) { | |||||
| EXPECT_EQ(ds->GetDatasetSize(), 44); | EXPECT_EQ(ds->GetDatasetSize(), 44); | ||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestImageFolderFailWithNullSampler) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFailWithNullSampler."; | |||||
| TEST_F(MindDataTestPipeline, TestImageFolderFailWithNullSamplerFail) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFailWithNullSamplerFail."; | |||||
| // Create an ImageFolder Dataset | // Create an ImageFolder Dataset | ||||
| std::string folder_path = datasets_root_path_ + "/testPK/data/"; | std::string folder_path = datasets_root_path_ + "/testPK/data/"; | ||||
| std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, nullptr); | std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, nullptr); | ||||
| // Expect failure: sampler can not be nullptr | |||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid ImageFolder input, sampler cannot be nullptr | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestImageFolderFailWithWrongSampler) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFailWithWrongSampler."; | |||||
| TEST_F(MindDataTestPipeline, TestImageFolderFailWithWrongSamplerFail) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFailWithWrongSamplerFail."; | |||||
| // Create a Cifar10 Dataset | |||||
| std::string folder_path = datasets_root_path_ + "/testCifar100Data/"; | |||||
| // Create an ImageFolder Dataset | |||||
| std::string folder_path = datasets_root_path_ + "/testPK/data/"; | |||||
| std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, SequentialSampler(-2, 5)); | std::shared_ptr<Dataset> ds = ImageFolder(folder_path, true, SequentialSampler(-2, 5)); | ||||
| // Expect failure: sampler is not construnced correctly | |||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid ImageFolder input, sampler is not constructed correctly | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestMnistGetDatasetSize) { | TEST_F(MindDataTestPipeline, TestMnistGetDatasetSize) { | ||||
| @@ -266,20 +296,29 @@ TEST_F(MindDataTestPipeline, TestMnistGetDatasetSize) { | |||||
| EXPECT_EQ(ds->GetDatasetSize(), 20); | EXPECT_EQ(ds->GetDatasetSize(), 20); | ||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestMnistFailWithWrongDatasetDir) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMnistFailWithWrongDatasetDir."; | |||||
| TEST_F(MindDataTestPipeline, TestMnistFailWithWrongDatasetDirFail) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMnistFailWithWrongDatasetDirFail."; | |||||
| // Create a Mnist Dataset | // Create a Mnist Dataset | ||||
| std::shared_ptr<Dataset> ds = Mnist("", "all", RandomSampler(false, 10)); | std::shared_ptr<Dataset> ds = Mnist("", "all", RandomSampler(false, 10)); | ||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid Mnist input, incorrect dataset directory input | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||
| TEST_F(MindDataTestPipeline, TestMnistFailWithNullSampler) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMnistFailWithNullSampler."; | |||||
| TEST_F(MindDataTestPipeline, TestMnistFailWithNullSamplerFail) { | |||||
| MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMnistFailWithNullSamplerFail."; | |||||
| // Create a Mnist Dataset | // Create a Mnist Dataset | ||||
| std::string folder_path = datasets_root_path_ + "/testMnistData/"; | std::string folder_path = datasets_root_path_ + "/testMnistData/"; | ||||
| std::shared_ptr<Dataset> ds = Mnist(folder_path, "all", nullptr); | std::shared_ptr<Dataset> ds = Mnist(folder_path, "all", nullptr); | ||||
| // Expect failure: sampler can not be nullptr | |||||
| EXPECT_EQ(ds, nullptr); | |||||
| EXPECT_NE(ds, nullptr); | |||||
| // Create an iterator over the result of the above dataset | |||||
| std::shared_ptr<Iterator> iter = ds->CreateIterator(); | |||||
| // Expect failure: invalid Mnist input, sampler cannot be nullptr | |||||
| EXPECT_EQ(iter, nullptr); | |||||
| } | } | ||||