datasets.cc

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <fstream>
#include <unordered_set>
#include <algorithm>
#include <numeric>
#include <sstream>
#include "minddata/dataset/include/datasets.h"
#include "minddata/dataset/include/samplers.h"
#include "minddata/dataset/include/transforms.h"
#include "minddata/dataset/engine/dataset_iterator.h"
// Source dataset headers (in alphabetical order)
#include "minddata/dataset/engine/datasetops/source/album_op.h"
#include "minddata/dataset/engine/datasetops/source/celeba_op.h"
#include "minddata/dataset/engine/datasetops/source/cifar_op.h"
#include "minddata/dataset/engine/datasetops/source/clue_op.h"
#include "minddata/dataset/engine/datasetops/source/coco_op.h"
#include "minddata/dataset/engine/datasetops/source/csv_op.h"
#include "minddata/dataset/engine/datasetops/source/image_folder_op.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/datasetops/source/manifest_op.h"
#endif
#include "minddata/dataset/engine/datasetops/source/mnist_op.h"
#include "minddata/dataset/engine/datasetops/source/random_data_op.h"
#include "minddata/dataset/engine/datasetops/source/text_file_op.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/datasetops/source/tf_reader_op.h"
#include "minddata/dataset/engine/datasetops/source/voc_op.h"
#endif
// Dataset operator headers (in alphabetical order)
#include "minddata/dataset/engine/datasetops/batch_op.h"
#ifndef ENABLE_ANDROID
#include "minddata/dataset/engine/datasetops/bucket_batch_by_length_op.h"
#endif
#include "minddata/dataset/engine/datasetops/build_vocab_op.h"
#include "minddata/dataset/engine/datasetops/concat_op.h"
#include "minddata/dataset/engine/datasetops/map_op/map_op.h"
#include "minddata/dataset/engine/datasetops/project_op.h"
#include "minddata/dataset/engine/datasetops/rename_op.h"
#include "minddata/dataset/engine/datasetops/repeat_op.h"
#include "minddata/dataset/engine/datasetops/shuffle_op.h"
#include "minddata/dataset/engine/datasetops/skip_op.h"
#include "minddata/dataset/engine/datasetops/take_op.h"
#include "minddata/dataset/engine/datasetops/zip_op.h"
// Sampler headers (in alphabetical order)
#include "minddata/dataset/engine/datasetops/source/sampler/sampler.h"
#include "minddata/dataset/engine/datasetops/source/sampler/random_sampler.h"
#include "minddata/dataset/engine/datasetops/source/sampler/sequential_sampler.h"
#include "minddata/dataset/core/config_manager.h"
#include "minddata/dataset/util/random.h"
#include "minddata/dataset/util/path.h"
namespace mindspore {
namespace dataset {
namespace api {
#define RETURN_EMPTY_IF_ERROR(_s) \
  do {                            \
    Status __rc = (_s);           \
    if (__rc.IsError()) {         \
      MS_LOG(ERROR) << __rc;      \
      return {};                  \
    }                             \
  } while (false)
// Function to create the iterator, which will build and launch the execution tree.
std::shared_ptr<Iterator> Dataset::CreateIterator(std::vector<std::string> columns) {
  std::shared_ptr<Iterator> iter;
  try {
    auto ds = shared_from_this();
    // The specified columns will be selected from the dataset and passed down the pipeline
    // in the order specified; other columns will be discarded.
    if (!columns.empty()) {
      ds = ds->Project(columns);
    }
    iter = std::make_shared<Iterator>();
    Status rc = iter->BuildAndLaunchTree(ds);
    if (rc.IsError()) {
      MS_LOG(ERROR) << "CreateIterator failed." << rc;
      return nullptr;
    }
    return iter;
  } catch (const std::exception &err) {
    MS_LOG(ERROR) << "CreateIterator: Iterator exception caught: " << err.what();
    return nullptr;
  }
  return iter;
}
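// Usage sketch (illustrative only; the dataset path and the RandomSampler() factory from
// samplers.h are assumptions, not part of this file):
//   std::shared_ptr<Dataset> ds = ImageFolder("/path/to/images", true, RandomSampler());
//   ds = ds->Shuffle(4)->Batch(2);
//   std::shared_ptr<Iterator> iter = ds->CreateIterator();
//   std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
//   iter->GetNextRow(&row);  // call repeatedly; an empty row signals the end of the data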
// Constructor
Dataset::Dataset() {
  // Fetch some default values from the config manager
  std::shared_ptr<ConfigManager> cfg = GlobalContext::config_manager();
  num_workers_ = cfg->num_parallel_workers();
  rows_per_buffer_ = cfg->rows_per_buffer();
  connector_que_size_ = cfg->op_connector_size();
  worker_connector_size_ = cfg->worker_connector_size();
}
/// \brief Function to create a SchemaObj
/// \param[in] schema_file Path of schema file
/// \return Shared pointer to the current schema
std::shared_ptr<SchemaObj> Schema(const std::string &schema_file) {
  auto schema = std::make_shared<SchemaObj>(schema_file);
  return schema->init() ? schema : nullptr;
}
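// Usage sketch (illustrative only; the schema path and the added column are assumptions):
//   std::shared_ptr<SchemaObj> schema = Schema("/path/to/schema.json");
//   if (schema == nullptr) { /* missing file or malformed "columns" node */ }
//   schema->add_column("label", "int32", {1});  // columns can also be added programmatically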
// FUNCTIONS TO CREATE DATASETS FOR LEAF-NODE DATASETS
// (In alphabetical order)
// Function to create an AlbumDataset.
std::shared_ptr<AlbumDataset> Album(const std::string &dataset_dir, const std::string &data_schema,
                                    const std::vector<std::string> &column_names, bool decode,
                                    const std::shared_ptr<SamplerObj> &sampler) {
  auto ds = std::make_shared<AlbumDataset>(dataset_dir, data_schema, column_names, decode, sampler);
  return ds->ValidateParams() ? ds : nullptr;
}
// Function to create a CelebADataset.
std::shared_ptr<CelebADataset> CelebA(const std::string &dataset_dir, const std::string &usage,
                                      const std::shared_ptr<SamplerObj> &sampler, bool decode,
                                      const std::set<std::string> &extensions) {
  auto ds = std::make_shared<CelebADataset>(dataset_dir, usage, sampler, decode, extensions);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
// Function to create a Cifar10Dataset.
std::shared_ptr<Cifar10Dataset> Cifar10(const std::string &dataset_dir, const std::string &usage,
                                        const std::shared_ptr<SamplerObj> &sampler) {
  auto ds = std::make_shared<Cifar10Dataset>(dataset_dir, usage, sampler);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
// Function to create a Cifar100Dataset.
std::shared_ptr<Cifar100Dataset> Cifar100(const std::string &dataset_dir, const std::string &usage,
                                          const std::shared_ptr<SamplerObj> &sampler) {
  auto ds = std::make_shared<Cifar100Dataset>(dataset_dir, usage, sampler);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
// Function to create a CLUEDataset.
std::shared_ptr<CLUEDataset> CLUE(const std::vector<std::string> &clue_files, const std::string &task,
                                  const std::string &usage, int64_t num_samples, ShuffleMode shuffle,
                                  int32_t num_shards, int32_t shard_id) {
  auto ds = std::make_shared<CLUEDataset>(clue_files, task, usage, num_samples, shuffle, num_shards, shard_id);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
// Function to create a CocoDataset.
std::shared_ptr<CocoDataset> Coco(const std::string &dataset_dir, const std::string &annotation_file,
                                  const std::string &task, const bool &decode,
                                  const std::shared_ptr<SamplerObj> &sampler) {
  auto ds = std::make_shared<CocoDataset>(dataset_dir, annotation_file, task, decode, sampler);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
// Function to create a CSVDataset.
std::shared_ptr<CSVDataset> CSV(const std::vector<std::string> &dataset_files, char field_delim,
                                const std::vector<std::shared_ptr<CsvBase>> &column_defaults,
                                const std::vector<std::string> &column_names, int64_t num_samples, ShuffleMode shuffle,
                                int32_t num_shards, int32_t shard_id) {
  auto ds = std::make_shared<CSVDataset>(dataset_files, field_delim, column_defaults, column_names, num_samples,
                                         shuffle, num_shards, shard_id);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
// Function to create an ImageFolderDataset.
std::shared_ptr<ImageFolderDataset> ImageFolder(const std::string &dataset_dir, bool decode,
                                                const std::shared_ptr<SamplerObj> &sampler,
                                                const std::set<std::string> &extensions,
                                                const std::map<std::string, int32_t> &class_indexing) {
  // This arg exists in ImageFolderOp, but is not externalized (in the Python API). The default value is false.
  bool recursive = false;
  // Create logical representation of ImageFolderDataset.
  auto ds = std::make_shared<ImageFolderDataset>(dataset_dir, decode, sampler, recursive, extensions, class_indexing);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
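// Usage sketch (illustrative only; the directory layout, extensions, class map, and the
// SequentialSampler() factory from samplers.h are assumptions):
//   // dataset_dir holds one sub-directory per class; each folder name maps to a label.
//   auto folder_ds = ImageFolder("/path/to/image_folder", /*decode=*/true, SequentialSampler(),
//                                {".jpg", ".png"}, {{"cat", 0}, {"dog", 1}});
//   if (folder_ds == nullptr) { /* ValidateParams() rejected the directory or sampler */ }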
#ifndef ENABLE_ANDROID
// Function to create a ManifestDataset.
std::shared_ptr<ManifestDataset> Manifest(const std::string &dataset_file, const std::string &usage,
                                          const std::shared_ptr<SamplerObj> &sampler,
                                          const std::map<std::string, int32_t> &class_indexing, bool decode) {
  auto ds = std::make_shared<ManifestDataset>(dataset_file, usage, sampler, class_indexing, decode);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
#endif
// Function to create a MnistDataset.
std::shared_ptr<MnistDataset> Mnist(const std::string &dataset_dir, const std::string &usage,
                                    const std::shared_ptr<SamplerObj> &sampler) {
  auto ds = std::make_shared<MnistDataset>(dataset_dir, usage, sampler);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
// Function to overload "+" operator to concat two datasets
std::shared_ptr<ConcatDataset> operator+(const std::shared_ptr<Dataset> &datasets1,
                                         const std::shared_ptr<Dataset> &datasets2) {
  std::shared_ptr<ConcatDataset> ds = std::make_shared<ConcatDataset>(std::vector({datasets2, datasets1}));
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
// Function to create a TextFileDataset.
std::shared_ptr<TextFileDataset> TextFile(const std::vector<std::string> &dataset_files, int64_t num_samples,
                                          ShuffleMode shuffle, int32_t num_shards, int32_t shard_id) {
  auto ds = std::make_shared<TextFileDataset>(dataset_files, num_samples, shuffle, num_shards, shard_id);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
#ifndef ENABLE_ANDROID
// Function to create a VOCDataset.
std::shared_ptr<VOCDataset> VOC(const std::string &dataset_dir, const std::string &task, const std::string &usage,
                                const std::map<std::string, int32_t> &class_indexing, bool decode,
                                const std::shared_ptr<SamplerObj> &sampler) {
  auto ds = std::make_shared<VOCDataset>(dataset_dir, task, usage, class_indexing, decode, sampler);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
#endif
// Function to create a ZipDataset.
std::shared_ptr<ZipDataset> Zip(const std::vector<std::shared_ptr<Dataset>> &datasets) {
  auto ds = std::make_shared<ZipDataset>(datasets);
  // Call derived class validation method.
  return ds->ValidateParams() ? ds : nullptr;
}
// FUNCTIONS TO CREATE DATASETS FOR DATASET OPS
// (In alphabetical order)
// Function to create a Batch dataset
std::shared_ptr<BatchDataset> Dataset::Batch(int32_t batch_size, bool drop_remainder) {
  // Default values
  std::vector<std::string> cols_to_map = {};
  std::map<std::string, std::pair<TensorShape, std::shared_ptr<Tensor>>> pad_map;
  bool pad = false;
  auto ds = std::make_shared<BatchDataset>(batch_size, drop_remainder, pad, cols_to_map, pad_map);
  if (!ds->ValidateParams()) {
    return nullptr;
  }
  ds->children.push_back(shared_from_this());
  return ds;
}
#ifndef ENABLE_ANDROID
// Function to create a BucketBatchByLength dataset
std::shared_ptr<BucketBatchByLengthDataset> Dataset::BucketBatchByLength(
  const std::vector<std::string> &column_names, const std::vector<int32_t> &bucket_boundaries,
  const std::vector<int32_t> &bucket_batch_sizes, std::function<TensorRow(TensorRow)> element_length_function,
  const std::map<std::string, std::pair<TensorShape, std::shared_ptr<Tensor>>> &pad_info, bool pad_to_bucket_boundary,
  bool drop_remainder) {
  auto ds = std::make_shared<BucketBatchByLengthDataset>(column_names, bucket_boundaries, bucket_batch_sizes,
                                                         element_length_function, pad_info, pad_to_bucket_boundary,
                                                         drop_remainder);
  if (!ds->ValidateParams()) {
    return nullptr;
  }
  ds->children.push_back(shared_from_this());
  return ds;
}
// Function to create a Vocab from the dataset
std::shared_ptr<Vocab> Dataset::BuildVocab(const std::vector<std::string> &columns,
                                           const std::pair<int64_t, int64_t> &freq_range, int64_t top_k,
                                           const std::vector<std::string> &special_tokens, bool special_first) {
  auto vocab = std::make_shared<Vocab>();
  auto ds = std::make_shared<BuildVocabDataset>(vocab, columns, freq_range, top_k, special_tokens, special_first);
  if (!ds->ValidateParams()) {
    return nullptr;
  }
  ds->children.push_back(shared_from_this());
  // Run the tree here to start building the vocab
  std::shared_ptr<Iterator> iter = ds->CreateIterator();
  if (iter == nullptr) {
    MS_LOG(ERROR) << "Failed to run the iterator in BuildVocab.";
    return nullptr;
  }
  // Finish building the vocab by triggering GetNextRow
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  if (!iter->GetNextRow(&row)) {
    return nullptr;
  }
  return vocab;
}
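// Usage sketch (illustrative only; the column name, frequency range, top_k value, and special
// tokens are assumptions):
//   std::shared_ptr<Vocab> vocab =
//     text_ds->BuildVocab({"text"}, {1, 9999999}, 5000, {"<pad>", "<unk>"}, true);
//   // Note: the pipeline is executed inside BuildVocab, so the vocab is ready on return.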
#endif
// Function to create a Concat dataset
std::shared_ptr<ConcatDataset> Dataset::Concat(const std::vector<std::shared_ptr<Dataset>> &datasets) {
  auto ds = std::make_shared<ConcatDataset>(datasets);
  ds->children.push_back(shared_from_this());
  return ds->ValidateParams() ? ds : nullptr;
}
// Function to create a Map dataset.
std::shared_ptr<MapDataset> Dataset::Map(std::vector<std::shared_ptr<TensorOperation>> operations,
                                         std::vector<std::string> input_columns,
                                         std::vector<std::string> output_columns,
                                         const std::vector<std::string> &project_columns) {
  auto ds = std::make_shared<MapDataset>(operations, input_columns, output_columns, project_columns);
  if (!ds->ValidateParams()) {
    return nullptr;
  }
  ds->children.push_back(shared_from_this());
  return ds;
}
// Function to create a ProjectDataset.
std::shared_ptr<ProjectDataset> Dataset::Project(const std::vector<std::string> &columns) {
  auto ds = std::make_shared<ProjectDataset>(columns);
  // Call derived class validation method.
  if (!ds->ValidateParams()) {
    return nullptr;
  }
  ds->children.push_back(shared_from_this());
  return ds;
}
// Function to create a RenameDataset.
std::shared_ptr<RenameDataset> Dataset::Rename(const std::vector<std::string> &input_columns,
                                               const std::vector<std::string> &output_columns) {
  auto ds = std::make_shared<RenameDataset>(input_columns, output_columns);
  // Call derived class validation method.
  if (!ds->ValidateParams()) {
    return nullptr;
  }
  ds->children.push_back(shared_from_this());
  return ds;
}
// Function to create a Repeat dataset.
std::shared_ptr<Dataset> Dataset::Repeat(int32_t count) {
  // Workaround for repeat == 1: do not inject a repeat op.
  if (count == 1) {
    return shared_from_this();
  }
  auto ds = std::make_shared<RepeatDataset>(count);
  if (!ds->ValidateParams()) {
    return nullptr;
  }
  ds->children.push_back(shared_from_this());
  return ds;
}
// Function to create a ShuffleOp
std::shared_ptr<ShuffleDataset> Dataset::Shuffle(int32_t buffer_size) {
  // Pass in reshuffle_each_epoch with true
  auto ds = std::make_shared<ShuffleDataset>(buffer_size, true);
  if (!ds->ValidateParams()) {
    return nullptr;
  }
  ds->children.push_back(shared_from_this());
  return ds;
}
// Function to create a SkipDataset.
std::shared_ptr<SkipDataset> Dataset::Skip(int32_t count) {
  auto ds = std::make_shared<SkipDataset>(count);
  // Call derived class validation method.
  if (!ds->ValidateParams()) {
    return nullptr;
  }
  ds->children.push_back(shared_from_this());
  return ds;
}
// Function to create a TakeDataset.
std::shared_ptr<Dataset> Dataset::Take(int32_t count) {
  // If count is greater than the number of elements in the dataset or equal to -1,
  // all the elements in the dataset will be taken.
  if (count == -1) {
    return shared_from_this();
  }
  auto ds = std::make_shared<TakeDataset>(count);
  // Call derived class validation method.
  if (!ds->ValidateParams()) {
    return nullptr;
  }
  ds->children.push_back(shared_from_this());
  return ds;
}
// Function to create a Zip dataset
std::shared_ptr<ZipDataset> Dataset::Zip(const std::vector<std::shared_ptr<Dataset>> &datasets) {
  // Default values
  auto ds = std::make_shared<ZipDataset>(datasets);
  ds->children.push_back(shared_from_this());
  return ds->ValidateParams() ? ds : nullptr;
}
SchemaObj::SchemaObj(const std::string &schema_file) : schema_file_(schema_file), num_rows_(0), dataset_type_("") {}
// SchemaObj init function
bool SchemaObj::init() {
  if (schema_file_ != "") {
    Path schema_file(schema_file_);
    if (!schema_file.Exists()) {
      MS_LOG(ERROR) << "The file " << schema_file << " does not exist or permission was denied!";
      return false;
    }
    nlohmann::json js;
    try {
      std::ifstream in(schema_file_);
      in >> js;
      if (js.find("columns") == js.end()) {
        MS_LOG(ERROR) << "\"columns\" node is required in the schema json file.";
        return false;
      }
    } catch (const std::exception &err) {
      MS_LOG(ERROR) << "Schema file failed to load";
      return false;
    }
    return from_json(js);
  }
  return true;
}
// Function to add a column to the schema with an mstype de_type
bool SchemaObj::add_column(std::string name, TypeId de_type, std::vector<int32_t> shape) {
  nlohmann::json new_column;
  new_column["name"] = name;
  // if de_type is mstype
  DataType data_type = dataset::MSTypeToDEType(de_type);
  new_column["type"] = data_type.ToString();
  if (shape.size() > 0) {
    new_column["shape"] = shape;
    new_column["rank"] = shape.size();
  } else {
    new_column["rank"] = 1;
  }
  columns_.push_back(new_column);
  return true;
}
// Function to add a column to the schema with a string de_type
bool SchemaObj::add_column(std::string name, std::string de_type, std::vector<int32_t> shape) {
  nlohmann::json new_column;
  new_column["name"] = name;
  DataType data_type(de_type);
  new_column["type"] = data_type.ToString();
  if (shape.size() > 0) {
    new_column["shape"] = shape;
    new_column["rank"] = shape.size();
  } else {
    new_column["rank"] = 1;
  }
  columns_.push_back(new_column);
  return true;
}
std::string SchemaObj::to_json() {
  nlohmann::json json_file;
  json_file["columns"] = columns_;
  if (dataset_type_ != "") {
    json_file["datasetType"] = dataset_type_;
  }
  if (num_rows_ > 0) {
    json_file["numRows"] = num_rows_;
  }
  return json_file.dump(2);
}
bool SchemaObj::parse_column(nlohmann::json columns) {
  std::string name, de_type;
  std::vector<int32_t> shape;
  columns_.clear();
  if (columns.type() == nlohmann::json::value_t::array) {
    // reference to a Python list
    for (auto column : columns) {
      auto key_name = column.find("name");
      if (key_name == column.end()) {
        MS_LOG(ERROR) << "Column's name is missing";
        return false;
      }
      name = *key_name;
      auto key_type = column.find("type");
      if (key_type == column.end()) {
        MS_LOG(ERROR) << "Column's type is missing";
        return false;
      }
      de_type = *key_type;
      shape.clear();
      auto key_shape = column.find("shape");
      if (key_shape != column.end()) {
        shape.insert(shape.end(), (*key_shape).begin(), (*key_shape).end());
      }
      if (!add_column(name, de_type, shape)) {
        return false;
      }
    }
  } else if (columns.type() == nlohmann::json::value_t::object) {
    for (const auto &it_child : columns.items()) {
      name = it_child.key();
      auto key_type = it_child.value().find("type");
      if (key_type == it_child.value().end()) {
        MS_LOG(ERROR) << "Column's type is missing";
        return false;
      }
      de_type = *key_type;
      shape.clear();
      auto key_shape = it_child.value().find("shape");
      if (key_shape != it_child.value().end()) {
        shape.insert(shape.end(), (*key_shape).begin(), (*key_shape).end());
      }
      if (!add_column(name, de_type, shape)) {
        return false;
      }
    }
  } else {
    MS_LOG(ERROR) << "columns must be dict or list; each column contains name, type, shape (optional).";
    return false;
  }
  return true;
}
bool SchemaObj::from_json(nlohmann::json json_obj) {
  for (const auto &it_child : json_obj.items()) {
    if (it_child.key() == "datasetType") {
      dataset_type_ = it_child.value();
    } else if (it_child.key() == "numRows") {
      num_rows_ = it_child.value();
    } else if (it_child.key() == "columns") {
      if (!parse_column(it_child.value())) {
        MS_LOG(ERROR) << "parse columns failed";
        return false;
      }
    } else {
      MS_LOG(ERROR) << "Unknown field " << it_child.key();
      return false;
    }
  }
  if (columns_.empty()) {
    MS_LOG(ERROR) << "Columns are missing.";
    return false;
  }
  if (num_rows_ <= 0) {
    MS_LOG(ERROR) << "numRows must be greater than 0";
    return false;
  }
  return true;
}
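// Example of a schema file accepted by init()/from_json() (illustrative only; the column names,
// types, and row count are assumptions):
//   {
//     "datasetType": "TF",
//     "numRows": 3,
//     "columns": {
//       "image": {"type": "uint8", "shape": [-1]},
//       "label": {"type": "int32", "shape": [1]}
//     }
//   }
// "columns" may also be a JSON list where each entry carries "name", "type", and an optional "shape".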
// OTHER FUNCTIONS
// Helper function to compute a default shuffle size
Status ComputeShuffleSize(int64_t num_files, int64_t num_devices, int64_t num_rows, int64_t total_rows,
                          int64_t *shuffle_size) {
  const int64_t average_files_multiplier = 4;
  const int64_t shuffle_max = 10000;
  int64_t avg_rows_per_file = 0;
  // Adjust the num rows per shard if sharding was given
  if (num_devices > 0) {
    if (num_rows % num_devices == 0) {
      num_rows = num_rows / num_devices;
    } else {
      num_rows = (num_rows / num_devices) + 1;
    }
  }
  // Cap based on the total rows directive. Some ops do not have this and give a value of 0.
  if (total_rows > 0) {
    num_rows = std::min(num_rows, total_rows);
  }
  // Get the average per file
  CHECK_FAIL_RETURN_UNEXPECTED(num_files != 0, "The size of dataset_files must be greater than 0.");
  avg_rows_per_file = num_rows / num_files;
  *shuffle_size = std::max(avg_rows_per_file * average_files_multiplier, shuffle_max);
  return Status::OK();
}
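// Worked example (illustrative only): with 4 files, 2 devices, 30000 rows, and no total-rows cap,
// the per-shard row count is 30000 / 2 = 15000, the average per file is 15000 / 4 = 3750, and the
// resulting shuffle size is max(3750 * 4, 10000) = 15000.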
// Helper function to inject a shuffle operator on top of the current operator being built
Status AddShuffleOp(int64_t num_files, int64_t num_devices, int64_t num_rows, int64_t total_rows,
                    int32_t connector_que_size, int32_t rows_per_buffer, std::shared_ptr<DatasetOp> *shuffle_op) {
  std::shared_ptr<ShuffleOp> new_shuffle_op = nullptr;
  int64_t shuffle_size = 0;
  RETURN_EMPTY_IF_ERROR(ComputeShuffleSize(num_files, num_devices, num_rows, total_rows, &shuffle_size));
  MS_LOG(INFO) << "Dataset::AddShuffleOp - num_rows: " << num_rows << ", shuffle_size: " << shuffle_size;
  // Add the shuffle op
  *shuffle_op = std::make_shared<ShuffleOp>(shuffle_size, GetSeed(), connector_que_size, true, rows_per_buffer);
  return Status::OK();
}
// Helper function to validate the dataset directory parameter
bool ValidateDatasetDirParam(const std::string &dataset_name, std::string dataset_dir) {
  if (dataset_dir.empty()) {
    MS_LOG(ERROR) << dataset_name << ": dataset_dir is not specified.";
    return false;
  }
  Path dir(dataset_dir);
  if (!dir.IsDirectory()) {
    MS_LOG(ERROR) << dataset_name << ": dataset_dir: [" << dataset_dir << "] is an invalid directory path.";
    return false;
  }
  if (access(dataset_dir.c_str(), R_OK) == -1) {
    MS_LOG(ERROR) << dataset_name << ": No access to specified dataset path: " << dataset_dir;
    return false;
  }
  return true;
}
// Helper function to validate the dataset files parameter
bool ValidateDatasetFilesParam(const std::string &dataset_name, const std::vector<std::string> &dataset_files) {
  if (dataset_files.empty()) {
    MS_LOG(ERROR) << dataset_name << ": dataset_files is not specified.";
    return false;
  }
  for (auto f : dataset_files) {
    Path dataset_file(f);
    if (!dataset_file.Exists()) {
      MS_LOG(ERROR) << dataset_name << ": dataset file: [" << f << "] is invalid or does not exist.";
      return false;
    }
  }
  return true;
}
// Helper function to validate dataset num_shards and shard_id parameters
bool ValidateDatasetShardParams(const std::string &dataset_name, int32_t num_shards, int32_t shard_id) {
  if (num_shards <= 0) {
    MS_LOG(ERROR) << dataset_name << ": Invalid num_shards: " << num_shards;
    return false;
  }
  if (shard_id < 0 || shard_id >= num_shards) {
    MS_LOG(ERROR) << dataset_name << ": Invalid input, shard_id: " << shard_id << ", num_shards: " << num_shards;
    return false;
  }
  return true;
}
// Helper function to validate the dataset sampler parameter
bool ValidateDatasetSampler(const std::string &dataset_name, const std::shared_ptr<SamplerObj> &sampler) {
  if (sampler == nullptr) {
    MS_LOG(ERROR) << dataset_name << ": Sampler is not constructed correctly, sampler: nullptr";
    return false;
  }
  return true;
}
bool ValidateStringValue(const std::string &str, const std::unordered_set<std::string> &valid_strings) {
  if (valid_strings.find(str) == valid_strings.end()) {
    std::string mode;
    mode = std::accumulate(valid_strings.begin(), valid_strings.end(), mode,
                           [](std::string a, std::string b) { return std::move(a) + " " + std::move(b); });
    MS_LOG(ERROR) << str << " does not match any mode in [" + mode + " ]";
    return false;
  }
  return true;
}
// Helper function to validate dataset input/output column parameters
bool ValidateDatasetColumnParam(const std::string &dataset_name, const std::string &column_param,
                                const std::vector<std::string> &columns) {
  if (columns.empty()) {
    MS_LOG(ERROR) << dataset_name << ":" << column_param << " should not be empty";
    return false;
  }
  for (uint32_t i = 0; i < columns.size(); ++i) {
    if (columns[i].empty()) {
      MS_LOG(ERROR) << dataset_name << ":" << column_param << "[" << i << "] should not be empty";
      return false;
    }
  }
  std::set<std::string> columns_set(columns.begin(), columns.end());
  if (columns_set.size() != columns.size()) {
    MS_LOG(ERROR) << dataset_name << ":" << column_param << ": Every column name must be unique";
    return false;
  }
  return true;
}
/* ####################################### Derived Dataset classes ################################# */
// DERIVED DATASET CLASSES LEAF-NODE DATASETS
// (In alphabetical order)
// Constructor for AlbumDataset
AlbumDataset::AlbumDataset(const std::string &dataset_dir, const std::string &data_schema,
                           const std::vector<std::string> &column_names, bool decode,
                           const std::shared_ptr<SamplerObj> &sampler)
    : dataset_dir_(dataset_dir),
      schema_path_(data_schema),
      column_names_(column_names),
      decode_(decode),
      sampler_(sampler) {}
bool AlbumDataset::ValidateParams() {
  if (!ValidateDatasetDirParam("AlbumDataset", dataset_dir_)) {
    return false;
  }
  if (!ValidateDatasetFilesParam("AlbumDataset", {schema_path_})) {
    return false;
  }
  if (!ValidateDatasetSampler("AlbumDataset", sampler_)) {
    return false;
  }
  if (!column_names_.empty()) {
    if (!ValidateDatasetColumnParam("AlbumDataset", "column_names", column_names_)) {
      return false;
    }
  }
  return true;
}
// Function to build AlbumDataset
std::vector<std::shared_ptr<DatasetOp>> AlbumDataset::Build() {
  // A vector containing shared pointer to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  auto schema = std::make_unique<DataSchema>();
  RETURN_EMPTY_IF_ERROR(schema->LoadSchemaFile(schema_path_, column_names_));
  // Argument that is not exposed to user in the API.
  std::set<std::string> extensions = {};
  node_ops.push_back(std::make_shared<AlbumOp>(num_workers_, rows_per_buffer_, dataset_dir_, connector_que_size_,
                                               decode_, extensions, std::move(schema), std::move(sampler_->Build())));
  return node_ops;
}
// Constructor for CelebADataset
CelebADataset::CelebADataset(const std::string &dataset_dir, const std::string &usage,
                             const std::shared_ptr<SamplerObj> &sampler, const bool &decode,
                             const std::set<std::string> &extensions)
    : dataset_dir_(dataset_dir), usage_(usage), sampler_(sampler), decode_(decode), extensions_(extensions) {}
bool CelebADataset::ValidateParams() {
  return ValidateDatasetDirParam("CelebADataset", dataset_dir_) && ValidateDatasetSampler("CelebADataset", sampler_) &&
         ValidateStringValue(usage_, {"all", "train", "valid", "test"});
}
// Function to build CelebADataset
std::vector<std::shared_ptr<DatasetOp>> CelebADataset::Build() {
  // A vector containing shared pointer to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  std::unique_ptr<DataSchema> schema = std::make_unique<DataSchema>();
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor("image", DataType(DataType::DE_UINT8), TensorImpl::kFlexible, 1)));
  // The attr label is a binary vector, e.g.: 0 1 0 0 1 ...
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor("attr", DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
  node_ops.push_back(std::make_shared<CelebAOp>(num_workers_, rows_per_buffer_, dataset_dir_, connector_que_size_,
                                                decode_, usage_, extensions_, std::move(schema),
                                                std::move(sampler_->Build())));
  return node_ops;
}
// Constructor for Cifar10Dataset
Cifar10Dataset::Cifar10Dataset(const std::string &dataset_dir, const std::string &usage,
                               std::shared_ptr<SamplerObj> sampler)
    : dataset_dir_(dataset_dir), usage_(usage), sampler_(sampler) {}
bool Cifar10Dataset::ValidateParams() {
  return ValidateDatasetDirParam("Cifar10Dataset", dataset_dir_) &&
         ValidateDatasetSampler("Cifar10Dataset", sampler_) && ValidateStringValue(usage_, {"train", "test", "all"});
}
// Function to build CifarOp for Cifar10
std::vector<std::shared_ptr<DatasetOp>> Cifar10Dataset::Build() {
  // A vector containing shared pointer to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  // Do internal Schema generation.
  auto schema = std::make_unique<DataSchema>();
  RETURN_EMPTY_IF_ERROR(schema->AddColumn(ColDescriptor("image", DataType(DataType::DE_UINT8), TensorImpl::kCv, 1)));
  TensorShape scalar = TensorShape::CreateScalar();
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor("label", DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 0, &scalar)));
  node_ops.push_back(std::make_shared<CifarOp>(CifarOp::CifarType::kCifar10, usage_, num_workers_, rows_per_buffer_,
                                               dataset_dir_, connector_que_size_, std::move(schema),
                                               std::move(sampler_->Build())));
  return node_ops;
}
// Constructor for Cifar100Dataset
Cifar100Dataset::Cifar100Dataset(const std::string &dataset_dir, const std::string &usage,
                                 std::shared_ptr<SamplerObj> sampler)
    : dataset_dir_(dataset_dir), usage_(usage), sampler_(sampler) {}
bool Cifar100Dataset::ValidateParams() {
  return ValidateDatasetDirParam("Cifar100Dataset", dataset_dir_) &&
         ValidateDatasetSampler("Cifar100Dataset", sampler_) && ValidateStringValue(usage_, {"train", "test", "all"});
}
// Function to build CifarOp for Cifar100
std::vector<std::shared_ptr<DatasetOp>> Cifar100Dataset::Build() {
  // A vector containing shared pointer to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  // Do internal Schema generation.
  auto schema = std::make_unique<DataSchema>();
  RETURN_EMPTY_IF_ERROR(schema->AddColumn(ColDescriptor("image", DataType(DataType::DE_UINT8), TensorImpl::kCv, 1)));
  TensorShape scalar = TensorShape::CreateScalar();
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor("coarse_label", DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 0, &scalar)));
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor("fine_label", DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 0, &scalar)));
  node_ops.push_back(std::make_shared<CifarOp>(CifarOp::CifarType::kCifar100, usage_, num_workers_, rows_per_buffer_,
                                               dataset_dir_, connector_que_size_, std::move(schema),
                                               std::move(sampler_->Build())));
  return node_ops;
}
// Constructor for CLUEDataset
CLUEDataset::CLUEDataset(const std::vector<std::string> clue_files, std::string task, std::string usage,
                         int64_t num_samples, ShuffleMode shuffle, int32_t num_shards, int32_t shard_id)
    : dataset_files_(clue_files),
      task_(task),
      usage_(usage),
      num_samples_(num_samples),
      shuffle_(shuffle),
      num_shards_(num_shards),
      shard_id_(shard_id) {}
bool CLUEDataset::ValidateParams() {
  if (!ValidateDatasetFilesParam("CLUEDataset", dataset_files_)) {
    return false;
  }
  std::vector<std::string> task_list = {"AFQMC", "TNEWS", "IFLYTEK", "CMNLI", "WSC", "CSL"};
  std::vector<std::string> usage_list = {"train", "test", "eval"};
  if (find(task_list.begin(), task_list.end(), task_) == task_list.end()) {
    MS_LOG(ERROR) << "task should be AFQMC, TNEWS, IFLYTEK, CMNLI, WSC or CSL.";
    return false;
  }
  if (find(usage_list.begin(), usage_list.end(), usage_) == usage_list.end()) {
    MS_LOG(ERROR) << "usage should be train, test or eval.";
    return false;
  }
  if (num_samples_ < 0) {
    MS_LOG(ERROR) << "CLUEDataset: Invalid number of samples: " << num_samples_;
    return false;
  }
  if (!ValidateDatasetShardParams("CLUEDataset", num_shards_, shard_id_)) {
    return false;
  }
  return true;
}
// Function to split a string based on a character delimiter
std::vector<std::string> CLUEDataset::split(const std::string &s, char delim) {
  std::vector<std::string> res;
  std::stringstream ss(s);
  std::string item;
  while (getline(ss, item, delim)) {
    res.push_back(item);
  }
  return res;
}
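// Nested JSON keys in the CLUE files are addressed with a '/' separated path, so, for example,
// split("target/span1_index", '/') yields {"target", "span1_index"} for the key map built below.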
// Function to build CLUEDataset
std::vector<std::shared_ptr<DatasetOp>> CLUEDataset::Build() {
  // A vector containing shared pointer to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  std::map<std::string, std::string> key_map;
  if (task_ == "AFQMC") {
    if (usage_ == "train") {
      key_map["sentence1"] = "sentence1";
      key_map["sentence2"] = "sentence2";
      key_map["label"] = "label";
    } else if (usage_ == "test") {
      key_map["id"] = "id";
      key_map["sentence1"] = "sentence1";
      key_map["sentence2"] = "sentence2";
    } else if (usage_ == "eval") {
      key_map["sentence1"] = "sentence1";
      key_map["sentence2"] = "sentence2";
      key_map["label"] = "label";
    }
  } else if (task_ == "CMNLI") {
    if (usage_ == "train") {
      key_map["sentence1"] = "sentence1";
      key_map["sentence2"] = "sentence2";
      key_map["label"] = "label";
    } else if (usage_ == "test") {
      key_map["id"] = "id";
      key_map["sentence1"] = "sentence1";
      key_map["sentence2"] = "sentence2";
    } else if (usage_ == "eval") {
      key_map["sentence1"] = "sentence1";
      key_map["sentence2"] = "sentence2";
      key_map["label"] = "label";
    }
  } else if (task_ == "CSL") {
    if (usage_ == "train") {
      key_map["id"] = "id";
      key_map["abst"] = "abst";
      key_map["keyword"] = "keyword";
      key_map["label"] = "label";
    } else if (usage_ == "test") {
      key_map["id"] = "id";
      key_map["abst"] = "abst";
      key_map["keyword"] = "keyword";
    } else if (usage_ == "eval") {
      key_map["id"] = "id";
      key_map["abst"] = "abst";
      key_map["keyword"] = "keyword";
      key_map["label"] = "label";
    }
  } else if (task_ == "IFLYTEK") {
    if (usage_ == "train") {
      key_map["label"] = "label";
      key_map["label_des"] = "label_des";
      key_map["sentence"] = "sentence";
    } else if (usage_ == "test") {
      key_map["id"] = "id";
      key_map["sentence"] = "sentence";
    } else if (usage_ == "eval") {
      key_map["label"] = "label";
      key_map["label_des"] = "label_des";
      key_map["sentence"] = "sentence";
    }
  } else if (task_ == "TNEWS") {
    if (usage_ == "train") {
      key_map["label"] = "label";
      key_map["label_desc"] = "label_desc";
      key_map["sentence"] = "sentence";
      key_map["keywords"] = "keywords";
    } else if (usage_ == "test") {
      key_map["id"] = "id";
      key_map["sentence"] = "sentence";
      key_map["keywords"] = "keywords";
    } else if (usage_ == "eval") {
      key_map["label"] = "label";
      key_map["label_desc"] = "label_desc";
      key_map["sentence"] = "sentence";
      key_map["keywords"] = "keywords";
    }
  } else if (task_ == "WSC") {
    if (usage_ == "train") {
      key_map["span1_index"] = "target/span1_index";
      key_map["span2_index"] = "target/span2_index";
      key_map["span1_text"] = "target/span1_text";
      key_map["span2_text"] = "target/span2_text";
      key_map["idx"] = "idx";
      key_map["label"] = "label";
      key_map["text"] = "text";
    } else if (usage_ == "test") {
      key_map["span1_index"] = "target/span1_index";
      key_map["span2_index"] = "target/span2_index";
      key_map["span1_text"] = "target/span1_text";
      key_map["span2_text"] = "target/span2_text";
      key_map["idx"] = "idx";
      key_map["text"] = "text";
    } else if (usage_ == "eval") {
      key_map["span1_index"] = "target/span1_index";
      key_map["span2_index"] = "target/span2_index";
      key_map["span1_text"] = "target/span1_text";
      key_map["span2_text"] = "target/span2_text";
      key_map["idx"] = "idx";
      key_map["label"] = "label";
      key_map["text"] = "text";
    }
  }
  ColKeyMap ck_map;
  for (auto &p : key_map) {
    ck_map.insert({p.first, split(p.second, '/')});
  }
  bool shuffle_files = (shuffle_ == ShuffleMode::kGlobal || shuffle_ == ShuffleMode::kFiles);
  // Sort the dataset files in a lexicographical order
  std::vector<std::string> sorted_dataset_files = dataset_files_;
  std::sort(sorted_dataset_files.begin(), sorted_dataset_files.end());
  std::shared_ptr<ClueOp> clue_op =
    std::make_shared<ClueOp>(num_workers_, rows_per_buffer_, num_samples_, worker_connector_size_, ck_map,
                             sorted_dataset_files, connector_que_size_, shuffle_files, num_shards_, shard_id_, nullptr);
  RETURN_EMPTY_IF_ERROR(clue_op->Init());
  if (shuffle_ == ShuffleMode::kGlobal) {
    // Inject ShuffleOp
    std::shared_ptr<DatasetOp> shuffle_op = nullptr;
    int64_t num_rows = 0;
    // First, get the number of rows in the dataset
    RETURN_EMPTY_IF_ERROR(ClueOp::CountAllFileRows(sorted_dataset_files, &num_rows));
    // Add the shuffle op after this op
    RETURN_EMPTY_IF_ERROR(AddShuffleOp(sorted_dataset_files.size(), num_shards_, num_rows, 0, connector_que_size_,
                                       rows_per_buffer_, &shuffle_op));
    node_ops.push_back(shuffle_op);
  }
  node_ops.push_back(clue_op);
  return node_ops;
}
// Constructor for CocoDataset
CocoDataset::CocoDataset(const std::string &dataset_dir, const std::string &annotation_file, const std::string &task,
                         const bool &decode, const std::shared_ptr<SamplerObj> &sampler)
    : dataset_dir_(dataset_dir), annotation_file_(annotation_file), task_(task), decode_(decode), sampler_(sampler) {}
bool CocoDataset::ValidateParams() {
  if (!ValidateDatasetDirParam("CocoDataset", dataset_dir_)) {
    return false;
  }
  if (!ValidateDatasetSampler("CocoDataset", sampler_)) {
    return false;
  }
  Path annotation_file(annotation_file_);
  if (!annotation_file.Exists()) {
    MS_LOG(ERROR) << "annotation_file is invalid or does not exist";
    return false;
  }
  std::set<std::string> task_list = {"Detection", "Stuff", "Panoptic", "Keypoint"};
  auto task_iter = task_list.find(task_);
  if (task_iter == task_list.end()) {
    MS_LOG(ERROR) << "Invalid task type";
    return false;
  }
  return true;
}
// Function to build CocoDataset
std::vector<std::shared_ptr<DatasetOp>> CocoDataset::Build() {
  // A vector containing shared pointer to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  CocoOp::TaskType task_type;
  if (task_ == "Detection") {
    task_type = CocoOp::TaskType::Detection;
  } else if (task_ == "Stuff") {
    task_type = CocoOp::TaskType::Stuff;
  } else if (task_ == "Keypoint") {
    task_type = CocoOp::TaskType::Keypoint;
  } else if (task_ == "Panoptic") {
    task_type = CocoOp::TaskType::Panoptic;
  }
  std::unique_ptr<DataSchema> schema = std::make_unique<DataSchema>();
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor(std::string("image"), DataType(DataType::DE_UINT8), TensorImpl::kFlexible, 1)));
  switch (task_type) {
    case CocoOp::TaskType::Detection:
      RETURN_EMPTY_IF_ERROR(schema->AddColumn(
        ColDescriptor(std::string("bbox"), DataType(DataType::DE_FLOAT32), TensorImpl::kFlexible, 1)));
      RETURN_EMPTY_IF_ERROR(schema->AddColumn(
        ColDescriptor(std::string("category_id"), DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
      RETURN_EMPTY_IF_ERROR(schema->AddColumn(
        ColDescriptor(std::string("iscrowd"), DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
      break;
    case CocoOp::TaskType::Stuff:
      RETURN_EMPTY_IF_ERROR(schema->AddColumn(
        ColDescriptor(std::string("segmentation"), DataType(DataType::DE_FLOAT32), TensorImpl::kFlexible, 1)));
      RETURN_EMPTY_IF_ERROR(schema->AddColumn(
        ColDescriptor(std::string("iscrowd"), DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
      break;
    case CocoOp::TaskType::Keypoint:
      RETURN_EMPTY_IF_ERROR(schema->AddColumn(
        ColDescriptor(std::string("keypoints"), DataType(DataType::DE_FLOAT32), TensorImpl::kFlexible, 1)));
      RETURN_EMPTY_IF_ERROR(schema->AddColumn(
        ColDescriptor(std::string("num_keypoints"), DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
      break;
    case CocoOp::TaskType::Panoptic:
      RETURN_EMPTY_IF_ERROR(schema->AddColumn(
        ColDescriptor(std::string("bbox"), DataType(DataType::DE_FLOAT32), TensorImpl::kFlexible, 1)));
      RETURN_EMPTY_IF_ERROR(schema->AddColumn(
        ColDescriptor(std::string("category_id"), DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
      RETURN_EMPTY_IF_ERROR(schema->AddColumn(
        ColDescriptor(std::string("iscrowd"), DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
      RETURN_EMPTY_IF_ERROR(
        schema->AddColumn(ColDescriptor(std::string("area"), DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
      break;
    default:
      MS_LOG(ERROR) << "CocoDataset::Build : Invalid task type";
      return {};
  }
  std::shared_ptr<CocoOp> op =
    std::make_shared<CocoOp>(task_type, dataset_dir_, annotation_file_, num_workers_, rows_per_buffer_,
                             connector_que_size_, decode_, std::move(schema), std::move(sampler_->Build()));
  node_ops.push_back(op);
  return node_ops;
}
  1009. // Constructor for CSVDataset
  1010. CSVDataset::CSVDataset(const std::vector<std::string> &csv_files, char field_delim,
  1011. const std::vector<std::shared_ptr<CsvBase>> &column_defaults,
  1012. const std::vector<std::string> &column_names, int64_t num_samples, ShuffleMode shuffle,
  1013. int32_t num_shards, int32_t shard_id)
  1014. : dataset_files_(csv_files),
  1015. field_delim_(field_delim),
  1016. column_defaults_(column_defaults),
  1017. column_names_(column_names),
  1018. num_samples_(num_samples),
  1019. shuffle_(shuffle),
  1020. num_shards_(num_shards),
  1021. shard_id_(shard_id) {}
bool CSVDataset::ValidateParams() {
  if (!ValidateDatasetFilesParam("CSVDataset", dataset_files_)) {
    return false;
  }
  if (field_delim_ == '"' || field_delim_ == '\r' || field_delim_ == '\n') {
    MS_LOG(ERROR) << "CSVDataset: The field delimiter should not be \", \\r, \\n";
    return false;
  }
  if (num_samples_ < 0) {
    MS_LOG(ERROR) << "CSVDataset: Invalid number of samples: " << num_samples_;
    return false;
  }
  if (!ValidateDatasetShardParams("CSVDataset", num_shards_, shard_id_)) {
    return false;
  }
  if (find(column_defaults_.begin(), column_defaults_.end(), nullptr) != column_defaults_.end()) {
    MS_LOG(ERROR) << "CSVDataset: column_defaults should not contain null.";
    return false;
  }
  if (!column_names_.empty()) {
    if (!ValidateDatasetColumnParam("CSVDataset", "column_names", column_names_)) {
      return false;
    }
  }
  return true;
}
// Function to build CSVDataset
std::vector<std::shared_ptr<DatasetOp>> CSVDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  bool shuffle_files = (shuffle_ == ShuffleMode::kGlobal || shuffle_ == ShuffleMode::kFiles);
  // Sort the dataset files in a lexicographical order
  std::vector<std::string> sorted_dataset_files = dataset_files_;
  std::sort(sorted_dataset_files.begin(), sorted_dataset_files.end());
  std::vector<std::shared_ptr<CsvOp::BaseRecord>> column_default_list;
  for (auto v : column_defaults_) {
    if (v->type == CsvType::INT) {
      column_default_list.push_back(
        std::make_shared<CsvOp::Record<int>>(CsvOp::INT, std::dynamic_pointer_cast<CsvRecord<int>>(v)->value));
    } else if (v->type == CsvType::FLOAT) {
      column_default_list.push_back(
        std::make_shared<CsvOp::Record<float>>(CsvOp::FLOAT, std::dynamic_pointer_cast<CsvRecord<float>>(v)->value));
    } else if (v->type == CsvType::STRING) {
      column_default_list.push_back(std::make_shared<CsvOp::Record<std::string>>(
        CsvOp::STRING, std::dynamic_pointer_cast<CsvRecord<std::string>>(v)->value));
    }
  }
  std::shared_ptr<CsvOp> csv_op = std::make_shared<CsvOp>(
    sorted_dataset_files, field_delim_, column_default_list, column_names_, num_workers_, rows_per_buffer_,
    num_samples_, worker_connector_size_, connector_que_size_, shuffle_files, num_shards_, shard_id_, nullptr);
  RETURN_EMPTY_IF_ERROR(csv_op->Init());
  if (shuffle_ == ShuffleMode::kGlobal) {
    // Inject ShuffleOp
    std::shared_ptr<DatasetOp> shuffle_op = nullptr;
    int64_t num_rows = 0;
    // First, get the number of rows in the dataset
    RETURN_EMPTY_IF_ERROR(CsvOp::CountAllFileRows(sorted_dataset_files, column_names_.empty(), &num_rows));
    // Add the shuffle op after this op
    RETURN_EMPTY_IF_ERROR(AddShuffleOp(sorted_dataset_files.size(), num_shards_, num_rows, 0, connector_que_size_,
                                       rows_per_buffer_, &shuffle_op));
    node_ops.push_back(shuffle_op);
  }
  node_ops.push_back(csv_op);
  return node_ops;
}
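// A minimal usage sketch of the class above (not part of the build): it assumes the CSV() factory function declared
// in the companion API header and uses an illustrative file path.
//   std::shared_ptr<Dataset> ds = CSV({"/path/to/data.csv"});
// Constructor for ImageFolderDataset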
ImageFolderDataset::ImageFolderDataset(std::string dataset_dir, bool decode, std::shared_ptr<SamplerObj> sampler,
                                       bool recursive, std::set<std::string> extensions,
                                       std::map<std::string, int32_t> class_indexing)
    : dataset_dir_(dataset_dir),
      decode_(decode),
      sampler_(sampler),
      recursive_(recursive),
      class_indexing_(class_indexing),
      exts_(extensions) {}
bool ImageFolderDataset::ValidateParams() {
  return ValidateDatasetDirParam("ImageFolderDataset", dataset_dir_) &&
         ValidateDatasetSampler("ImageFolderDataset", sampler_);
}
std::vector<std::shared_ptr<DatasetOp>> ImageFolderDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  // Do internal Schema generation.
  // This arg exists in ImageFolderOp but is not externalized (in the Python API).
  std::unique_ptr<DataSchema> schema = std::make_unique<DataSchema>();
  TensorShape scalar = TensorShape::CreateScalar();
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor("image", DataType(DataType::DE_UINT8), TensorImpl::kFlexible, 1)));
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor("label", DataType(DataType::DE_INT32), TensorImpl::kFlexible, 0, &scalar)));
  node_ops.push_back(std::make_shared<ImageFolderOp>(num_workers_, rows_per_buffer_, dataset_dir_, connector_que_size_,
                                                     recursive_, decode_, exts_, class_indexing_, std::move(schema),
                                                     std::move(sampler_->Build())));
  return node_ops;
}
#ifndef ENABLE_ANDROID
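// Constructor for ManifestDataset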
ManifestDataset::ManifestDataset(const std::string &dataset_file, const std::string &usage,
                                 const std::shared_ptr<SamplerObj> &sampler,
                                 const std::map<std::string, int32_t> &class_indexing, bool decode)
    : dataset_file_(dataset_file), usage_(usage), decode_(decode), class_index_(class_indexing), sampler_(sampler) {}
bool ManifestDataset::ValidateParams() {
  std::vector<char> forbidden_symbols = {':', '*', '?', '"', '<', '>', '|', '`', '&', '\'', ';'};
  for (char c : dataset_file_) {
    auto p = std::find(forbidden_symbols.begin(), forbidden_symbols.end(), c);
    if (p != forbidden_symbols.end()) {
      MS_LOG(ERROR) << "filename should not contain :*?\"<>|`&;\'";
      return false;
    }
  }
  Path manifest_file(dataset_file_);
  if (!manifest_file.Exists()) {
    MS_LOG(ERROR) << "dataset file: [" << dataset_file_ << "] is invalid or does not exist";
    return false;
  }
  if (!ValidateDatasetSampler("ManifestDataset", sampler_)) {
    return false;
  }
  std::vector<std::string> usage_list = {"train", "eval", "inference"};
  if (find(usage_list.begin(), usage_list.end(), usage_) == usage_list.end()) {
    MS_LOG(ERROR) << "usage should be train, eval or inference.";
    return false;
  }
  return true;
}
std::vector<std::shared_ptr<DatasetOp>> ManifestDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  // Do internal Schema generation.
  auto schema = std::make_unique<DataSchema>();
  RETURN_EMPTY_IF_ERROR(schema->AddColumn(ColDescriptor("image", DataType(DataType::DE_UINT8), TensorImpl::kCv, 1)));
  TensorShape scalar = TensorShape::CreateScalar();
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor("label", DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 0, &scalar)));
  std::shared_ptr<ManifestOp> manifest_op;
  manifest_op =
    std::make_shared<ManifestOp>(num_workers_, rows_per_buffer_, dataset_file_, connector_que_size_, decode_,
                                 class_index_, std::move(schema), std::move(sampler_->Build()), usage_);
  node_ops.push_back(manifest_op);
  return node_ops;
}
#endif
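// Constructor for MnistDataset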
MnistDataset::MnistDataset(std::string dataset_dir, std::string usage, std::shared_ptr<SamplerObj> sampler)
    : dataset_dir_(dataset_dir), usage_(usage), sampler_(sampler) {}
bool MnistDataset::ValidateParams() {
  return ValidateStringValue(usage_, {"train", "test", "all"}) &&
         ValidateDatasetDirParam("MnistDataset", dataset_dir_) && ValidateDatasetSampler("MnistDataset", sampler_);
}
std::vector<std::shared_ptr<DatasetOp>> MnistDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  // Do internal Schema generation.
  auto schema = std::make_unique<DataSchema>();
  RETURN_EMPTY_IF_ERROR(schema->AddColumn(ColDescriptor("image", DataType(DataType::DE_UINT8), TensorImpl::kCv, 1)));
  TensorShape scalar = TensorShape::CreateScalar();
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor("label", DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 0, &scalar)));
  node_ops.push_back(std::make_shared<MnistOp>(usage_, num_workers_, rows_per_buffer_, dataset_dir_,
                                               connector_que_size_, std::move(schema), std::move(sampler_->Build())));
  return node_ops;
}
// ValidateParams for RandomDataset
bool RandomDataset::ValidateParams() {
  if (total_rows_ < 0) {
    MS_LOG(ERROR) << "RandomDataset: total_rows must be greater than or equal to 0, but got: " << total_rows_;
    return false;
  }
  if (!ValidateDatasetSampler("RandomDataset", sampler_)) {
    return false;
  }
  if (!columns_list_.empty()) {
    if (!ValidateDatasetColumnParam("RandomDataset", "columns_list", columns_list_)) {
      return false;
    }
  }
  return true;
}
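// Helper to generate a uniformly distributed random integer in [min, max] using the seeded generator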
int32_t RandomDataset::GenRandomInt(int32_t min, int32_t max) {
  std::uniform_int_distribution<int32_t> uniDist(min, max);
  return uniDist(rand_gen_);
}
// Build for RandomDataset
std::vector<std::shared_ptr<DatasetOp>> RandomDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  rand_gen_.seed(GetSeed());  // seed the random generator
  // If total rows was not given, then randomly pick a number
  std::shared_ptr<SchemaObj> schema_obj;
  if (!schema_path_.empty()) {
    schema_obj = Schema(schema_path_);
    if (schema_obj == nullptr) {
      return {};
    }
  }
  std::string schema_json_string, schema_file_path;
  if (schema_ != nullptr) {
    schema_->set_dataset_type("Random");
    if (total_rows_ != 0) {
      schema_->set_num_rows(total_rows_);
    }
    schema_json_string = schema_->to_json();
  } else {
    schema_file_path = schema_path_;
  }
  std::unique_ptr<DataSchema> data_schema;
  std::vector<std::string> columns_to_load;
  if (columns_list_.size() > 0) {
    columns_to_load = columns_list_;
  }
  if (!schema_file_path.empty() || !schema_json_string.empty()) {
    data_schema = std::make_unique<DataSchema>();
    if (!schema_file_path.empty()) {
      data_schema->LoadSchemaFile(schema_file_path, columns_to_load);
    } else if (!schema_json_string.empty()) {
      data_schema->LoadSchemaString(schema_json_string, columns_to_load);
    }
  }
  std::shared_ptr<RandomDataOp> op;
  op = std::make_shared<RandomDataOp>(num_workers_, connector_que_size_, rows_per_buffer_, total_rows_,
                                      std::move(data_schema), std::move(sampler_->Build()));
  node_ops.push_back(op);
  return node_ops;
}
// Constructor for TextFileDataset
TextFileDataset::TextFileDataset(std::vector<std::string> dataset_files, int32_t num_samples, ShuffleMode shuffle,
                                 int32_t num_shards, int32_t shard_id)
    : dataset_files_(dataset_files),
      num_samples_(num_samples),
      shuffle_(shuffle),
      num_shards_(num_shards),
      shard_id_(shard_id) {}
bool TextFileDataset::ValidateParams() {
  if (!ValidateDatasetFilesParam("TextFileDataset", dataset_files_)) {
    return false;
  }
  if (num_samples_ < 0) {
    MS_LOG(ERROR) << "TextFileDataset: Invalid number of samples: " << num_samples_;
    return false;
  }
  if (!ValidateDatasetShardParams("TextFileDataset", num_shards_, shard_id_)) {
    return false;
  }
  return true;
}
// Function to build TextFileDataset
std::vector<std::shared_ptr<DatasetOp>> TextFileDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  bool shuffle_files = (shuffle_ == ShuffleMode::kGlobal || shuffle_ == ShuffleMode::kFiles);
  // Sort the dataset files in a lexicographical order
  std::vector<std::string> sorted_dataset_files = dataset_files_;
  std::sort(sorted_dataset_files.begin(), sorted_dataset_files.end());
  // Do internal Schema generation.
  auto schema = std::make_unique<DataSchema>();
  RETURN_EMPTY_IF_ERROR(
    schema->AddColumn(ColDescriptor("text", DataType(DataType::DE_UINT8), TensorImpl::kFlexible, 1)));
  // Create and initialize TextFileOp
  std::shared_ptr<TextFileOp> text_file_op = std::make_shared<TextFileOp>(
    num_workers_, rows_per_buffer_, num_samples_, worker_connector_size_, std::move(schema), sorted_dataset_files,
    connector_que_size_, shuffle_files, num_shards_, shard_id_, nullptr);
  RETURN_EMPTY_IF_ERROR(text_file_op->Init());
  if (shuffle_ == ShuffleMode::kGlobal) {
    // Inject ShuffleOp
    std::shared_ptr<DatasetOp> shuffle_op = nullptr;
    int64_t num_rows = 0;
    // First, get the number of rows in the dataset
    RETURN_EMPTY_IF_ERROR(TextFileOp::CountAllFileRows(sorted_dataset_files, &num_rows));
    // Add the shuffle op after this op
    RETURN_EMPTY_IF_ERROR(AddShuffleOp(sorted_dataset_files.size(), num_shards_, num_rows, 0, connector_que_size_,
                                       rows_per_buffer_, &shuffle_op));
    node_ops.push_back(shuffle_op);
  }
  // Add TextFileOp
  node_ops.push_back(text_file_op);
  return node_ops;
}
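// A minimal usage sketch of the class above (not part of the build): it assumes the TextFile() factory function
// declared in the companion API header; the file path is illustrative only.
//   std::shared_ptr<Dataset> ds = TextFile({"/path/to/corpus.txt"}, 0, ShuffleMode::kGlobal);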
#ifndef ENABLE_ANDROID
// Validator for TFRecordDataset
bool TFRecordDataset::ValidateParams() { return true; }
// Function to build TFRecordDataset
std::vector<std::shared_ptr<DatasetOp>> TFRecordDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  // Sort the dataset files in a lexicographical order
  std::vector<std::string> sorted_dir_files = dataset_files_;
  std::sort(sorted_dir_files.begin(), sorted_dir_files.end());
  // Create Schema Object
  std::unique_ptr<DataSchema> data_schema = std::make_unique<DataSchema>();
  if (!schema_path_.empty()) {
    RETURN_EMPTY_IF_ERROR(data_schema->LoadSchemaFile(schema_path_, columns_list_));
  } else if (schema_obj_ != nullptr) {
    std::string schema_json_string = schema_obj_->to_json();
    RETURN_EMPTY_IF_ERROR(data_schema->LoadSchemaString(schema_json_string, columns_list_));
  }
  bool shuffle_files = (shuffle_ == ShuffleMode::kGlobal || shuffle_ == ShuffleMode::kFiles);
  // Create and initialize TFReaderOp
  std::shared_ptr<TFReaderOp> tf_reader_op = std::make_shared<TFReaderOp>(
    num_workers_, worker_connector_size_, rows_per_buffer_, num_samples_, sorted_dir_files, std::move(data_schema),
    connector_que_size_, columns_list_, shuffle_files, num_shards_, shard_id_, shard_equal_rows_, nullptr);
  RETURN_EMPTY_IF_ERROR(tf_reader_op->Init());
  if (shuffle_ == ShuffleMode::kGlobal) {
    // Inject ShuffleOp
    std::shared_ptr<DatasetOp> shuffle_op = nullptr;
    int64_t num_rows = 0;
    // First, get the number of rows in the dataset
    RETURN_EMPTY_IF_ERROR(TFReaderOp::CountTotalRows(&num_rows, sorted_dir_files));
    // Add the shuffle op after this op
    RETURN_EMPTY_IF_ERROR(AddShuffleOp(sorted_dir_files.size(), num_shards_, num_rows, 0, connector_que_size_,
                                       rows_per_buffer_, &shuffle_op));
    node_ops.push_back(shuffle_op);
  }
  // Add TFReaderOp
  node_ops.push_back(tf_reader_op);
  return node_ops;
}
// Constructor for VOCDataset
VOCDataset::VOCDataset(const std::string &dataset_dir, const std::string &task, const std::string &usage,
                       const std::map<std::string, int32_t> &class_indexing, bool decode,
                       std::shared_ptr<SamplerObj> sampler)
    : dataset_dir_(dataset_dir),
      task_(task),
      usage_(usage),
      class_index_(class_indexing),
      decode_(decode),
      sampler_(sampler) {}
bool VOCDataset::ValidateParams() {
  Path dir(dataset_dir_);
  if (!dir.IsDirectory()) {
    MS_LOG(ERROR) << "Invalid dataset path or no dataset path is specified.";
    return false;
  }
  if (!ValidateDatasetSampler("VOCDataset", sampler_)) {
    return false;
  }
  if (task_ == "Segmentation") {
    if (!class_index_.empty()) {
      MS_LOG(ERROR) << "class_indexing is invalid in Segmentation task.";
      return false;
    }
    Path imagesets_file = dir / "ImageSets" / "Segmentation" / usage_ + ".txt";
    if (!imagesets_file.Exists()) {
      MS_LOG(ERROR) << "Invalid mode: " << usage_ << ", file \"" << imagesets_file << "\" does not exist!";
      return false;
    }
  } else if (task_ == "Detection") {
    Path imagesets_file = dir / "ImageSets" / "Main" / usage_ + ".txt";
    if (!imagesets_file.Exists()) {
      MS_LOG(ERROR) << "Invalid mode: " << usage_ << ", file \"" << imagesets_file << "\" does not exist!";
      return false;
    }
  } else {
    MS_LOG(ERROR) << "Invalid task: " << task_;
    return false;
  }
  return true;
}
// Function to build VOCDataset
std::vector<std::shared_ptr<DatasetOp>> VOCDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  auto schema = std::make_unique<DataSchema>();
  VOCOp::TaskType task_type_;
  if (task_ == "Segmentation") {
    task_type_ = VOCOp::TaskType::Segmentation;
    RETURN_EMPTY_IF_ERROR(schema->AddColumn(
      ColDescriptor(std::string(kColumnImage), DataType(DataType::DE_UINT8), TensorImpl::kFlexible, 1)));
    RETURN_EMPTY_IF_ERROR(schema->AddColumn(
      ColDescriptor(std::string(kColumnTarget), DataType(DataType::DE_UINT8), TensorImpl::kFlexible, 1)));
  } else if (task_ == "Detection") {
    task_type_ = VOCOp::TaskType::Detection;
    RETURN_EMPTY_IF_ERROR(schema->AddColumn(
      ColDescriptor(std::string(kColumnImage), DataType(DataType::DE_UINT8), TensorImpl::kFlexible, 1)));
    RETURN_EMPTY_IF_ERROR(schema->AddColumn(
      ColDescriptor(std::string(kColumnBbox), DataType(DataType::DE_FLOAT32), TensorImpl::kFlexible, 1)));
    RETURN_EMPTY_IF_ERROR(schema->AddColumn(
      ColDescriptor(std::string(kColumnLabel), DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
    RETURN_EMPTY_IF_ERROR(schema->AddColumn(
      ColDescriptor(std::string(kColumnDifficult), DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
    RETURN_EMPTY_IF_ERROR(schema->AddColumn(
      ColDescriptor(std::string(kColumnTruncate), DataType(DataType::DE_UINT32), TensorImpl::kFlexible, 1)));
  }
  std::shared_ptr<VOCOp> voc_op;
  voc_op = std::make_shared<VOCOp>(task_type_, usage_, dataset_dir_, class_index_, num_workers_, rows_per_buffer_,
                                   connector_que_size_, decode_, std::move(schema), std::move(sampler_->Build()));
  node_ops.push_back(voc_op);
  return node_ops;
}
#endif
// DERIVED DATASET CLASSES FOR NON-LEAF-NODE DATASETS
// (In alphabetical order)
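// Constructor for BatchDataset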
BatchDataset::BatchDataset(int32_t batch_size, bool drop_remainder, bool pad, std::vector<std::string> cols_to_map,
                           std::map<std::string, std::pair<TensorShape, std::shared_ptr<Tensor>>> pad_map)
    : batch_size_(batch_size),
      drop_remainder_(drop_remainder),
      pad_(pad),
      cols_to_map_(cols_to_map),
      pad_map_(pad_map) {}
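// Function to build BatchOp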
std::vector<std::shared_ptr<DatasetOp>> BatchDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
#ifdef ENABLE_PYTHON
  py::function noop;
  node_ops.push_back(std::make_shared<BatchOp>(batch_size_, drop_remainder_, pad_, connector_que_size_, num_workers_,
                                               cols_to_map_, cols_to_map_, noop, noop, pad_map_));
#else
  node_ops.push_back(std::make_shared<BatchOp>(batch_size_, drop_remainder_, pad_, connector_que_size_, num_workers_,
                                               cols_to_map_, pad_map_));
#endif
  // Until py::function is implemented for the C++ API, there is no need to insert a project op after batch,
  // because project is only needed when the batch op performs per_batch_map, and per_batch_map is a pyfunc.
  return node_ops;
}
bool BatchDataset::ValidateParams() {
  if (batch_size_ <= 0) {
    MS_LOG(ERROR) << "Batch: batch_size should be a positive integer, but got: " << batch_size_;
    return false;
  }
  if (!cols_to_map_.empty()) {
    MS_LOG(ERROR) << "cols_to_map functionality is not implemented in C++; this should be left empty.";
    return false;
  }
  return true;
}
#ifndef ENABLE_ANDROID
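// Constructor for BucketBatchByLengthDataset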
BucketBatchByLengthDataset::BucketBatchByLengthDataset(
  const std::vector<std::string> &column_names, const std::vector<int32_t> &bucket_boundaries,
  const std::vector<int32_t> &bucket_batch_sizes, std::function<TensorRow(TensorRow)> element_length_function,
  const std::map<std::string, std::pair<TensorShape, std::shared_ptr<Tensor>>> &pad_info, bool pad_to_bucket_boundary,
  bool drop_remainder)
    : column_names_(column_names),
      bucket_boundaries_(bucket_boundaries),
      bucket_batch_sizes_(bucket_batch_sizes),
      element_length_function_(element_length_function),
      pad_info_(pad_info),
      pad_to_bucket_boundary_(pad_to_bucket_boundary),
      drop_remainder_(drop_remainder) {}
std::vector<std::shared_ptr<DatasetOp>> BucketBatchByLengthDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  std::shared_ptr<TensorOp> c_func;
  if (element_length_function_ != nullptr) {
    c_func = std::make_shared<CFuncOp>(element_length_function_);
  } else {
    c_func = nullptr;
  }
  node_ops.push_back(std::make_shared<BucketBatchByLengthOp>(column_names_, bucket_boundaries_, bucket_batch_sizes_,
                                                             c_func, pad_info_, pad_to_bucket_boundary_,
                                                             drop_remainder_, connector_que_size_));
  return node_ops;
}
bool BucketBatchByLengthDataset::ValidateParams() {
  if (element_length_function_ == nullptr && column_names_.size() != 1) {
    MS_LOG(ERROR) << "BucketBatchByLength: If element_length_function is not specified, exactly one column name "
                     "should be passed.";
    return false;
  }
  // Check bucket_boundaries: must be positive and strictly increasing
  if (bucket_boundaries_.empty()) {
    MS_LOG(ERROR) << "BucketBatchByLength: bucket_boundaries cannot be empty.";
    return false;
  }
  for (size_t i = 0; i < bucket_boundaries_.size(); i++) {
    if (bucket_boundaries_[i] <= 0) {
      MS_LOG(ERROR)
        << "BucketBatchByLength: bucket_boundaries must only contain positive numbers. However, the element at index: "
        << i << " was: " << bucket_boundaries_[i];
      return false;
    }
    if (i > 0 && bucket_boundaries_[i - 1] >= bucket_boundaries_[i]) {
      MS_LOG(ERROR)
        << "BucketBatchByLength: bucket_boundaries must be strictly increasing. However, the elements at index: "
        << i - 1 << " and " << i << " were: " << bucket_boundaries_[i - 1] << " and " << bucket_boundaries_[i]
        << " respectively.";
      return false;
    }
  }
  // Check bucket_batch_sizes: must be positive
  if (bucket_batch_sizes_.empty()) {
    MS_LOG(ERROR) << "BucketBatchByLength: bucket_batch_sizes must be non-empty";
    return false;
  }
  if (bucket_batch_sizes_.size() != bucket_boundaries_.size() + 1) {
    MS_LOG(ERROR) << "BucketBatchByLength: bucket_batch_sizes's size must equal the size of bucket_boundaries + 1";
    return false;
  }
  if (std::any_of(bucket_batch_sizes_.begin(), bucket_batch_sizes_.end(), [](int i) { return i <= 0; })) {
    MS_LOG(ERROR) << "BucketBatchByLength: bucket_batch_sizes must only contain positive numbers.";
    return false;
  }
  return true;
}
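// Constructor for BuildVocabDataset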
BuildVocabDataset::BuildVocabDataset(std::shared_ptr<Vocab> vocab, const std::vector<std::string> &columns,
                                     const std::pair<int64_t, int64_t> &freq_range, int64_t top_k,
                                     const std::vector<std::string> &special_tokens, bool special_first)
    : vocab_(vocab),
      columns_(columns),
      freq_range_(freq_range),
      top_k_(top_k),
      special_tokens_(special_tokens),
      special_first_(special_first) {}
// Function to build BuildVocabDataset
std::vector<std::shared_ptr<DatasetOp>> BuildVocabDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  std::shared_ptr<BuildVocabOp> build_vocab_op;
  build_vocab_op = std::make_shared<BuildVocabOp>(vocab_, columns_, freq_range_, top_k_, special_tokens_,
                                                  special_first_, num_workers_, connector_que_size_);
  node_ops.push_back(build_vocab_op);
  return node_ops;
}
bool BuildVocabDataset::ValidateParams() {
  if (vocab_ == nullptr) {
    MS_LOG(ERROR) << "BuildVocab: vocab is null.";
    return false;
  }
  if (top_k_ <= 0) {
    MS_LOG(ERROR) << "BuildVocab: top_k should be positive, but got: " << top_k_;
    return false;
  }
  if (freq_range_.first < 0 || freq_range_.second > kDeMaxFreq || freq_range_.first > freq_range_.second) {
    MS_LOG(ERROR) << "BuildVocab: freq_range [a,b] should satisfy 0 <= a <= b (a,b are inclusive), "
                  << "but got [" << freq_range_.first << ", " << freq_range_.second << "]";
    return false;
  }
  if (!columns_.empty()) {
    if (!ValidateDatasetColumnParam("BuildVocab", "columns", columns_)) {
      return false;
    }
  }
  return true;
}
#endif
// Function to build ConcatOp
ConcatDataset::ConcatDataset(const std::vector<std::shared_ptr<Dataset>> &datasets) : datasets_(datasets) {
  this->children = datasets_;
}
bool ConcatDataset::ValidateParams() {
  if (datasets_.empty()) {
    MS_LOG(ERROR) << "Concat: concatenated datasets are not specified.";
    return false;
  }
  if (find(datasets_.begin(), datasets_.end(), nullptr) != datasets_.end()) {
    MS_LOG(ERROR) << "Concat: concatenated dataset should not be null.";
    return false;
  }
  return true;
}
std::vector<std::shared_ptr<DatasetOp>> ConcatDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  node_ops.push_back(std::make_shared<ConcatOp>(connector_que_size_));
  return node_ops;
}
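// Constructor for MapDataset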
MapDataset::MapDataset(std::vector<std::shared_ptr<TensorOperation>> operations, std::vector<std::string> input_columns,
                       std::vector<std::string> output_columns, const std::vector<std::string> &project_columns)
    : operations_(operations),
      input_columns_(input_columns),
      output_columns_(output_columns),
      project_columns_(project_columns) {}
std::vector<std::shared_ptr<DatasetOp>> MapDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  std::vector<std::shared_ptr<TensorOp>> tensor_ops;
  // Build a TensorOp from each TensorOperation in the vector.
  // This is to ensure each iterator holds its own copy of the TensorOp objects.
  (void)std::transform(
    operations_.begin(), operations_.end(), std::back_inserter(tensor_ops),
    [](std::shared_ptr<TensorOperation> operation) -> std::shared_ptr<TensorOp> { return operation->Build(); });
  // This parameter will be removed with the next rebase
  std::vector<std::string> col_orders;
  auto map_op = std::make_shared<MapOp>(input_columns_, output_columns_, tensor_ops, num_workers_, connector_que_size_);
  if (!project_columns_.empty()) {
    auto project_op = std::make_shared<ProjectOp>(project_columns_);
    node_ops.push_back(project_op);
  }
  node_ops.push_back(map_op);
  return node_ops;
}
bool MapDataset::ValidateParams() {
  if (operations_.empty()) {
    MS_LOG(ERROR) << "Map: No operation is specified.";
    return false;
  }
  if (!input_columns_.empty()) {
    if (!ValidateDatasetColumnParam("MapDataset", "input_columns", input_columns_)) {
      return false;
    }
  }
  if (!output_columns_.empty()) {
    if (!ValidateDatasetColumnParam("MapDataset", "output_columns", output_columns_)) {
      return false;
    }
  }
  if (!project_columns_.empty()) {
    if (!ValidateDatasetColumnParam("MapDataset", "project_columns", project_columns_)) {
      return false;
    }
  }
  return true;
}
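// A minimal usage sketch of the class above (not part of the build): it assumes a leaf dataset `ds` built elsewhere
// and a vision::Decode() operation from the transforms API; both names are illustrative only.
//   ds = ds->Map({vision::Decode()}, {"image"});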
// Function to build ProjectOp
ProjectDataset::ProjectDataset(const std::vector<std::string> &columns) : columns_(columns) {}
bool ProjectDataset::ValidateParams() {
  if (columns_.empty()) {
    MS_LOG(ERROR) << "ProjectDataset: No columns are specified.";
    return false;
  }
  if (!ValidateDatasetColumnParam("ProjectDataset", "columns", columns_)) {
    return false;
  }
  return true;
}
std::vector<std::shared_ptr<DatasetOp>> ProjectDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  node_ops.push_back(std::make_shared<ProjectOp>(columns_));
  return node_ops;
}
// Function to build RenameOp
RenameDataset::RenameDataset(const std::vector<std::string> &input_columns,
                             const std::vector<std::string> &output_columns)
    : input_columns_(input_columns), output_columns_(output_columns) {}
bool RenameDataset::ValidateParams() {
  if (input_columns_.size() != output_columns_.size()) {
    MS_LOG(ERROR) << "RenameDataset: input_columns and output_columns must have the same size";
    return false;
  }
  if (!ValidateDatasetColumnParam("RenameDataset", "input_columns", input_columns_) ||
      !ValidateDatasetColumnParam("RenameDataset", "output_columns", output_columns_)) {
    return false;
  }
  return true;
}
std::vector<std::shared_ptr<DatasetOp>> RenameDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  node_ops.push_back(std::make_shared<RenameOp>(input_columns_, output_columns_, connector_que_size_));
  return node_ops;
}
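// Constructor for RepeatDataset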
RepeatDataset::RepeatDataset(int32_t count) : repeat_count_(count) {}
std::vector<std::shared_ptr<DatasetOp>> RepeatDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  node_ops.push_back(std::make_shared<RepeatOp>(repeat_count_));
  return node_ops;
}
bool RepeatDataset::ValidateParams() {
  if (repeat_count_ <= 0 && repeat_count_ != -1) {
    MS_LOG(ERROR) << "Repeat: repeat_count should be either -1 or a positive integer, repeat_count_: " << repeat_count_;
    return false;
  }
  return true;
}
// Constructor for ShuffleDataset
ShuffleDataset::ShuffleDataset(int32_t shuffle_size, bool reset_every_epoch)
    : shuffle_size_(shuffle_size), shuffle_seed_(GetSeed()), reset_every_epoch_(reset_every_epoch) {}
// Function to build the ShuffleOp
std::vector<std::shared_ptr<DatasetOp>> ShuffleDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  node_ops.push_back(std::make_shared<ShuffleOp>(shuffle_size_, shuffle_seed_, connector_que_size_, reset_every_epoch_,
                                                 rows_per_buffer_));
  return node_ops;
}
// Function to validate the parameters for ShuffleDataset
bool ShuffleDataset::ValidateParams() {
  if (shuffle_size_ <= 1) {
    MS_LOG(ERROR) << "ShuffleDataset: Invalid input, shuffle_size: " << shuffle_size_;
    return false;
  }
  return true;
}
// Constructor for SkipDataset
SkipDataset::SkipDataset(int32_t count) : skip_count_(count) {}
// Function to build the SkipOp
std::vector<std::shared_ptr<DatasetOp>> SkipDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  node_ops.push_back(std::make_shared<SkipOp>(skip_count_, connector_que_size_));
  return node_ops;
}
// Function to validate the parameters for SkipDataset
bool SkipDataset::ValidateParams() {
  if (skip_count_ <= -1) {
    MS_LOG(ERROR) << "Skip: skip_count should not be negative, skip_count: " << skip_count_;
    return false;
  }
  return true;
}
// Constructor for TakeDataset
TakeDataset::TakeDataset(int32_t count) : take_count_(count) {}
// Function to build the TakeOp
std::vector<std::shared_ptr<DatasetOp>> TakeDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  node_ops.push_back(std::make_shared<TakeOp>(take_count_, connector_que_size_));
  return node_ops;
}
// Function to validate the parameters for TakeDataset
bool TakeDataset::ValidateParams() {
  if (take_count_ <= 0 && take_count_ != -1) {
    MS_LOG(ERROR) << "Take: take_count should be either -1 or a positive integer, take_count: " << take_count_;
    return false;
  }
  return true;
}
// Function to build ZipOp
ZipDataset::ZipDataset(const std::vector<std::shared_ptr<Dataset>> &datasets) : datasets_(datasets) {
  for (auto dataset : datasets_) {
    this->children.push_back(dataset);
  }
}
bool ZipDataset::ValidateParams() {
  if (datasets_.empty()) {
    MS_LOG(ERROR) << "Zip: datasets to zip are not specified.";
    return false;
  }
  if (find(datasets_.begin(), datasets_.end(), nullptr) != datasets_.end()) {
    MS_LOG(ERROR) << "ZipDataset: zip dataset should not be null.";
    return false;
  }
  return true;
}
std::vector<std::shared_ptr<DatasetOp>> ZipDataset::Build() {
  // A vector containing shared pointers to the Dataset Ops that this object will create
  std::vector<std::shared_ptr<DatasetOp>> node_ops;
  node_ops.push_back(std::make_shared<ZipOp>(rows_per_buffer_, connector_que_size_));
  return node_ops;
}
}  // namespace api
}  // namespace dataset
}  // namespace mindspore