You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

test_file_reader.py 7.7 kB

first commit Former-commit-id: 08bc23ba02cffbce3cf63962390a65459a132e48 [formerly 0795edd4834b9b7dc66db8d10d4cbaf42bbf82cb] [formerly b5010b42541add7e2ea2578bf2da537efc457757 [formerly a7ca09c2c34c4fc8b3d8e01fcfa08eeeb2cae99d]] [formerly 615058473a2177ca5b89e9edbb797f4c2a59c7e5 [formerly 743d8dfc6843c4c205051a8ab309fbb2116c895e] [formerly bb0ea98b1e14154ef464e2f7a16738705894e54b [formerly 960a69da74b81ef8093820e003f2d6c59a34974c]]] [formerly 2fa3be52c1b44665bc81a7cc7d4cea4bbf0d91d5 [formerly 2054589f0898627e0a17132fd9d4cc78efc91867] [formerly 3b53730e8a895e803dfdd6ca72bc05e17a4164c1 [formerly 8a2fa8ab7baf6686d21af1f322df46fd58c60e69]] [formerly 87d1e3a07a19d03c7d7c94d93ab4fa9f58dada7c [formerly f331916385a5afac1234854ee8d7f160f34b668f] [formerly 69fb3c78a483343f5071da4f7e2891b83a49dd18 [formerly 386086f05aa9487f65bce2ee54438acbdce57650]]]] Former-commit-id: a00aed8c934a6460c4d9ac902b9a74a3d6864697 [formerly 26fdeca29c2f07916d837883983ca2982056c78e] [formerly 0e3170d41a2f99ecf5c918183d361d4399d793bf [formerly 3c12ad4c88ac5192e0f5606ac0d88dd5bf8602dc]] [formerly d5894f84f2fd2e77a6913efdc5ae388cf1be0495 [formerly ad3e7bc670ff92c992730d29c9d3aa1598d844e8] [formerly 69fb3c78a483343f5071da4f7e2891b83a49dd18]] Former-commit-id: 3c19c9fae64f6106415fbc948a4dc613b9ee12f8 [formerly 467ddc0549c74bb007e8f01773bb6dc9103b417d] [formerly 5fa518345d958e2760e443b366883295de6d991c [formerly 3530e130b9fdb7280f638dbc2e785d2165ba82aa]] Former-commit-id: 9f5d473d42a435ec0d60149939d09be1acc25d92 [formerly be0b25c4ec2cde052a041baf0e11f774a158105d] Former-commit-id: 9eca71cb73ba9edccd70ac06a3b636b8d4093b04
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171
  1. import unittest
  2. import os.path
  3. import sys
  4. COMMON_PRIMITIVES_DIR = os.path.join(os.path.dirname(__file__), 'common-primitives')
  5. # NOTE: This insertion should appear before any code attempting to resolve or load primitives,
  6. # so the git submodule version of `common-primitives` is looked at first.
  7. sys.path.insert(0, COMMON_PRIMITIVES_DIR)
  8. TEST_PRIMITIVES_DIR = os.path.join(os.path.dirname(__file__), 'data', 'primitives')
  9. sys.path.insert(0, TEST_PRIMITIVES_DIR)
  10. from common_primitives.dataset_to_dataframe import DatasetToDataFramePrimitive
  11. from test_primitives.file_reader import DummyImageReaderPrimitive
  12. from d3m import container, utils
  13. class TestDummyImageReaderPrimitive(unittest.TestCase):
  14. def test_basic(self):
  15. dataset_doc_path = os.path.abspath(
  16. os.path.join(os.path.dirname(__file__), 'data', 'datasets', 'image_dataset_1', 'datasetDoc.json')
  17. )
  18. dataset = container.Dataset.load('file://{dataset_doc_path}'.format(dataset_doc_path=dataset_doc_path))
  19. dataframe_hyperparams_class = DatasetToDataFramePrimitive.metadata.get_hyperparams()
  20. dataframe_primitive = DatasetToDataFramePrimitive(
  21. hyperparams=dataframe_hyperparams_class.defaults().replace({'dataframe_resource': '0'})
  22. )
  23. dataframe = dataframe_primitive.produce(inputs=dataset).value
  24. image_hyperparams_class = DummyImageReaderPrimitive.metadata.get_hyperparams()
  25. image_primitive = DummyImageReaderPrimitive(
  26. hyperparams=image_hyperparams_class.defaults().replace({'return_result': 'replace'})
  27. )
  28. images_names = image_primitive.produce(inputs=dataframe).value
  29. self.assertEqual(images_names.iloc[0]['filename'][0], '001_HandPhoto_left_01.jpg')
  30. self.assertEqual(images_names.iloc[1]['filename'][0], 'cifar10_bird_1.png')
  31. self.assertEqual(images_names.iloc[2]['filename'][0], 'cifar10_bird_2.png')
  32. self.assertEqual(images_names.iloc[3]['filename'][0], 'mnist_0_2.png')
  33. self.assertEqual(images_names.iloc[4]['filename'][0], 'mnist_1_1.png')
  34. self._test_metadata(images_names.metadata)
  35. def _test_metadata(self, metadata):
  36. self.assertEqual(
  37. utils.to_json_structure(metadata.to_internal_simple_structure()),
  38. [
  39. {
  40. 'metadata': {
  41. 'dimension': {
  42. 'length': 5,
  43. 'name': 'rows',
  44. 'semantic_types': ['https://metadata.datadrivendiscovery.org/types/TabularRow'],
  45. },
  46. 'schema': 'https://metadata.datadrivendiscovery.org/schemas/v0/container.json',
  47. 'semantic_types': [
  48. 'https://metadata.datadrivendiscovery.org/types/Table',
  49. 'https://metadata.datadrivendiscovery.org/types/FilesCollection',
  50. ],
  51. 'structural_type': 'd3m.container.pandas.DataFrame',
  52. },
  53. 'selector': [],
  54. },
  55. {
  56. 'metadata': {
  57. 'dimension': {
  58. 'length': 1,
  59. 'name': 'columns',
  60. 'semantic_types': ['https://metadata.datadrivendiscovery.org/types/TabularColumn'],
  61. }
  62. },
  63. 'selector': ['__ALL_ELEMENTS__'],
  64. },
  65. {
  66. 'metadata': {
  67. 'dimension': {
  68. 'length': 1,
  69. 'name': 'rows',
  70. 'semantic_types': ['https://metadata.datadrivendiscovery.org/types/TabularRow'],
  71. },
  72. 'location_base_uris': '__NO_VALUE__',
  73. 'media_types': '__NO_VALUE__',
  74. 'name': 'filename',
  75. 'semantic_types': [
  76. 'https://metadata.datadrivendiscovery.org/types/PrimaryKey',
  77. 'http://schema.org/ImageObject',
  78. 'https://metadata.datadrivendiscovery.org/types/Table',
  79. ],
  80. 'structural_type': 'd3m.container.numpy.ndarray',
  81. },
  82. 'selector': ['__ALL_ELEMENTS__', 0],
  83. },
  84. {
  85. 'metadata': {
  86. 'dimension': {
  87. 'length': 1,
  88. 'name': 'columns',
  89. 'semantic_types': ['https://metadata.datadrivendiscovery.org/types/TabularColumn'],
  90. }
  91. },
  92. 'selector': ['__ALL_ELEMENTS__', 0, '__ALL_ELEMENTS__'],
  93. },
  94. {
  95. 'metadata': {'structural_type': 'str'},
  96. 'selector': ['__ALL_ELEMENTS__', 0, '__ALL_ELEMENTS__', '__ALL_ELEMENTS__'],
  97. },
  98. {
  99. 'metadata': {
  100. 'image_reader_metadata': {'foobar': 42},
  101. 'semantic_types': [
  102. 'https://metadata.datadrivendiscovery.org/types/PrimaryKey',
  103. 'http://schema.org/ImageObject',
  104. 'https://metadata.datadrivendiscovery.org/types/Table',
  105. ],
  106. },
  107. 'selector': [0, 0],
  108. },
  109. {
  110. 'metadata': {
  111. 'image_reader_metadata': {'foobar': 42},
  112. 'semantic_types': [
  113. 'https://metadata.datadrivendiscovery.org/types/PrimaryKey',
  114. 'http://schema.org/ImageObject',
  115. 'https://metadata.datadrivendiscovery.org/types/Table',
  116. ],
  117. },
  118. 'selector': [1, 0],
  119. },
  120. {
  121. 'metadata': {
  122. 'image_reader_metadata': {'foobar': 42},
  123. 'semantic_types': [
  124. 'https://metadata.datadrivendiscovery.org/types/PrimaryKey',
  125. 'http://schema.org/ImageObject',
  126. 'https://metadata.datadrivendiscovery.org/types/Table',
  127. ],
  128. },
  129. 'selector': [2, 0],
  130. },
  131. {
  132. 'metadata': {
  133. 'image_reader_metadata': {'foobar': 42},
  134. 'semantic_types': [
  135. 'https://metadata.datadrivendiscovery.org/types/PrimaryKey',
  136. 'http://schema.org/ImageObject',
  137. 'https://metadata.datadrivendiscovery.org/types/Table',
  138. ],
  139. },
  140. 'selector': [3, 0],
  141. },
  142. {
  143. 'metadata': {
  144. 'image_reader_metadata': {'foobar': 42},
  145. 'semantic_types': [
  146. 'https://metadata.datadrivendiscovery.org/types/PrimaryKey',
  147. 'http://schema.org/ImageObject',
  148. 'https://metadata.datadrivendiscovery.org/types/Table',
  149. ],
  150. },
  151. 'selector': [4, 0],
  152. },
  153. ],
  154. )
# Allow running this test module directly (e.g. `python test_file_reader.py`).
if __name__ == '__main__':
    unittest.main()

全栈的自动化机器学习系统,主要针对多变量时间序列数据的异常检测。TODS提供了详尽的用于构建基于机器学习的异常检测系统的模块,它们包括:数据处理(data processing),时间序列处理(time series processing),特征分析(feature analysis),检测算法(detection algorithms),和强化模块(reinforcement module)。这些模块所提供的功能包括常见的数据预处理、时间序列数据的平滑或变换,从时域或频域中抽取特征、以及多种多样的检测算法。