You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

test_datasets_ag_news.py 6.0 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
  15. import mindspore.dataset as ds
  16. FILE_DIR = '../data/dataset/testAGNews'
  17. def test_ag_news_dataset_basic():
  18. """
  19. Feature: Test AG News Dataset.
  20. Description: read data from a single file.
  21. Expectation: the data is processed successfully.
  22. """
  23. buffer = []
  24. data = ds.AGNewsDataset(FILE_DIR, usage='all', shuffle=False)
  25. data = data.repeat(2)
  26. data = data.skip(2)
  27. for d in data.create_dict_iterator(num_epochs=1, output_numpy=True):
  28. buffer.append(d)
  29. assert len(buffer) == 8
  30. def test_ag_news_dataset_one_file():
  31. """
  32. Feature: Test AG News Dataset.
  33. Description: read data from a single file.
  34. Expectation: the data is processed successfully.
  35. """
  36. data = ds.AGNewsDataset(FILE_DIR, usage='test', shuffle=False)
  37. buffer = []
  38. for d in data.create_dict_iterator(num_epochs=1, output_numpy=True):
  39. buffer.append(d)
  40. assert len(buffer) == 2
  41. def test_ag_news_dataset_all_file():
  42. """
  43. Feature: Test AG News Dataset(usage=all).
  44. Description: read train data and test data.
  45. Expectation: the data is processed successfully.
  46. """
  47. buffer = []
  48. data = ds.AGNewsDataset(FILE_DIR, usage='all', shuffle=False)
  49. for d in data.create_dict_iterator(num_epochs=1, output_numpy=True):
  50. buffer.append(d)
  51. assert len(buffer) == 5
  52. def test_ag_news_dataset_num_samples():
  53. """
  54. Feature: Test AG News Dataset.
  55. Description: read data from a single file.
  56. Expectation: the data is processed successfully.
  57. """
  58. data = ds.AGNewsDataset(FILE_DIR, usage='all', num_samples=4, shuffle=False)
  59. count = 0
  60. for _ in data.create_dict_iterator(num_epochs=1, output_numpy=True):
  61. count += 1
  62. assert count == 4
  63. def test_ag_news_dataset_distribution():
  64. """
  65. Feature: Test AG News Dataset.
  66. Description: read data from a single file.
  67. Expectation: the data is processed successfully.
  68. """
  69. data = ds.AGNewsDataset(FILE_DIR, usage='test', shuffle=False, num_shards=2, shard_id=0)
  70. count = 0
  71. for _ in data.create_dict_iterator(num_epochs=1, output_numpy=True):
  72. count += 1
  73. assert count == 1
  74. def test_ag_news_dataset_quoted():
  75. """
  76. Feature: Test get the AG News Dataset.
  77. Description: read AGNewsDataset data and get data.
  78. Expectation: the data is processed successfully.
  79. """
  80. data = ds.AGNewsDataset(FILE_DIR, usage='test', shuffle=False)
  81. buffer = []
  82. for d in data.create_dict_iterator(num_epochs=1, output_numpy=True):
  83. buffer.extend([d['index'].item().decode("utf8"),
  84. d['title'].item().decode("utf8"),
  85. d['description'].item().decode("utf8")])
  86. assert buffer == ["3", "Background of the selection",
  87. "In this day and age, the internet is growing rapidly, "
  88. "the total number of connected devices is increasing and "
  89. "we are entering the era of big data.",
  90. "4", "Related technologies",
  91. "\"Leaflet is the leading open source JavaScript library "
  92. "for mobile-friendly interactive maps.\""]
  93. def test_ag_news_dataset_size():
  94. """
  95. Feature: Test Getters.
  96. Description: test get_dataset_size of AG News dataset.
  97. Expectation: the data is processed successfully.
  98. """
  99. data = ds.AGNewsDataset(FILE_DIR, usage='test', shuffle=False)
  100. assert data.get_dataset_size() == 2
  101. def test_ag_news_dataset_exception():
  102. """
  103. Feature: Error Test.
  104. Description: test the wrong input.
  105. Expectation: unable to read in data.
  106. """
  107. def exception_func(item):
  108. raise Exception("Error occur!")
  109. try:
  110. data = ds.AGNewsDataset(FILE_DIR, usage='test', shuffle=False)
  111. data = data.map(operations=exception_func, input_columns=["index"], num_parallel_workers=1)
  112. for _ in data.__iter__():
  113. pass
  114. assert False
  115. except RuntimeError as e:
  116. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  117. try:
  118. data = ds.AGNewsDataset(FILE_DIR, usage='test', shuffle=False)
  119. data = data.map(operations=exception_func, input_columns=["title"], num_parallel_workers=1)
  120. for _ in data.__iter__():
  121. pass
  122. assert False
  123. except RuntimeError as e:
  124. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  125. try:
  126. data = ds.AGNewsDataset(FILE_DIR, usage='test', shuffle=False)
  127. data = data.map(operations=exception_func, input_columns=["description"], num_parallel_workers=1)
  128. for _ in data.__iter__():
  129. pass
  130. assert False
  131. except RuntimeError as e:
  132. assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)
  133. if __name__ == "__main__":
  134. test_ag_news_dataset_basic()
  135. test_ag_news_dataset_one_file()
  136. test_ag_news_dataset_all_file()
  137. test_ag_news_dataset_num_samples()
  138. test_ag_news_dataset_distribution()
  139. test_ag_news_dataset_quoted()
  140. test_ag_news_dataset_size()
  141. test_ag_news_dataset_exception()