You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

DatasetToDataframe.py 3.7 kB

first commit Former-commit-id: 08bc23ba02cffbce3cf63962390a65459a132e48 [formerly 0795edd4834b9b7dc66db8d10d4cbaf42bbf82cb] [formerly b5010b42541add7e2ea2578bf2da537efc457757 [formerly a7ca09c2c34c4fc8b3d8e01fcfa08eeeb2cae99d]] [formerly 615058473a2177ca5b89e9edbb797f4c2a59c7e5 [formerly 743d8dfc6843c4c205051a8ab309fbb2116c895e] [formerly bb0ea98b1e14154ef464e2f7a16738705894e54b [formerly 960a69da74b81ef8093820e003f2d6c59a34974c]]] [formerly 2fa3be52c1b44665bc81a7cc7d4cea4bbf0d91d5 [formerly 2054589f0898627e0a17132fd9d4cc78efc91867] [formerly 3b53730e8a895e803dfdd6ca72bc05e17a4164c1 [formerly 8a2fa8ab7baf6686d21af1f322df46fd58c60e69]] [formerly 87d1e3a07a19d03c7d7c94d93ab4fa9f58dada7c [formerly f331916385a5afac1234854ee8d7f160f34b668f] [formerly 69fb3c78a483343f5071da4f7e2891b83a49dd18 [formerly 386086f05aa9487f65bce2ee54438acbdce57650]]]] Former-commit-id: a00aed8c934a6460c4d9ac902b9a74a3d6864697 [formerly 26fdeca29c2f07916d837883983ca2982056c78e] [formerly 0e3170d41a2f99ecf5c918183d361d4399d793bf [formerly 3c12ad4c88ac5192e0f5606ac0d88dd5bf8602dc]] [formerly d5894f84f2fd2e77a6913efdc5ae388cf1be0495 [formerly ad3e7bc670ff92c992730d29c9d3aa1598d844e8] [formerly 69fb3c78a483343f5071da4f7e2891b83a49dd18]] Former-commit-id: 3c19c9fae64f6106415fbc948a4dc613b9ee12f8 [formerly 467ddc0549c74bb007e8f01773bb6dc9103b417d] [formerly 5fa518345d958e2760e443b366883295de6d991c [formerly 3530e130b9fdb7280f638dbc2e785d2165ba82aa]] Former-commit-id: 9f5d473d42a435ec0d60149939d09be1acc25d92 [formerly be0b25c4ec2cde052a041baf0e11f774a158105d] Former-commit-id: 9eca71cb73ba9edccd70ac06a3b636b8d4093b04
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687
  1. import os
  2. import typing
  3. from d3m import container, utils as d3m_utils
  4. from d3m.base import utils as base_utils
  5. from d3m.metadata import base as metadata_base, hyperparams
  6. from d3m.primitive_interfaces import base, transformer
  7. import common_primitives
  8. __all__ = ('DatasetToDataFramePrimitive',)
  9. Inputs = container.Dataset
  10. Outputs = container.DataFrame
  11. class Hyperparams(hyperparams.Hyperparams):
  12. dataframe_resource = hyperparams.Hyperparameter[typing.Union[str, None]](
  13. default=None,
  14. semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
  15. description="Resource ID of a DataFrame to extract if there are multiple tabular resources inside a Dataset and none is a dataset entry point.",
  16. )
  17. class DatasetToDataFramePrimitive(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyperparams]):
  18. """
  19. A primitive which extracts a DataFrame out of a Dataset.
  20. """
  21. metadata = metadata_base.PrimitiveMetadata(
  22. {
  23. 'id': '4b42ce1e-9b98-4a25-b68e-fad13311eb65',
  24. 'version': '0.3.0',
  25. 'name': "Extract a DataFrame from a Dataset",
  26. 'python_path': 'd3m.primitives.tods.data_processing.dataset_to_dataframe',
  27. 'source': {
  28. 'name': common_primitives.__author__,
  29. 'contact': 'mailto:mitar.commonprimitives@tnode.com',
  30. 'uris': [
  31. 'https://gitlab.com/datadrivendiscovery/common-primitives/blob/master/common_primitives/dataset_to_dataframe.py',
  32. 'https://gitlab.com/datadrivendiscovery/common-primitives.git',
  33. ],
  34. },
  35. 'installation': [{
  36. 'type': metadata_base.PrimitiveInstallationType.PIP,
  37. 'package_uri': 'git+https://gitlab.com/datadrivendiscovery/common-primitives.git@{git_commit}#egg=common_primitives'.format(
  38. git_commit=d3m_utils.current_git_commit(os.path.dirname(__file__)),
  39. ),
  40. }],
  41. 'algorithm_types': [
  42. metadata_base.PrimitiveAlgorithmType.DATA_CONVERSION,
  43. ],
  44. 'primitive_family': metadata_base.PrimitiveFamily.DATA_TRANSFORMATION,
  45. },
  46. )
  47. def produce(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[Outputs]:
  48. dataframe_resource_id, dataframe = base_utils.get_tabular_resource(inputs, self.hyperparams['dataframe_resource'])
  49. dataframe.metadata = self._update_metadata(inputs.metadata, dataframe_resource_id)
  50. assert isinstance(dataframe, container.DataFrame), type(dataframe)
  51. return base.CallResult(dataframe)
  52. def _update_metadata(self, metadata: metadata_base.DataMetadata, resource_id: metadata_base.SelectorSegment) -> metadata_base.DataMetadata:
  53. resource_metadata = dict(metadata.query((resource_id,)))
  54. if 'structural_type' not in resource_metadata or not issubclass(resource_metadata['structural_type'], container.DataFrame):
  55. raise TypeError("The Dataset resource is not a DataFrame, but \"{type}\".".format(
  56. type=resource_metadata.get('structural_type', None),
  57. ))
  58. resource_metadata.update(
  59. {
  60. 'schema': metadata_base.CONTAINER_SCHEMA_VERSION,
  61. },
  62. )
  63. new_metadata = metadata_base.DataMetadata(resource_metadata)
  64. new_metadata = metadata.copy_to(new_metadata, (resource_id,))
  65. # Resource is not anymore an entry point.
  66. new_metadata = new_metadata.remove_semantic_type((), 'https://metadata.datadrivendiscovery.org/types/DatasetEntryPoint')
  67. return new_metadata

TODS是一个全栈的自动化机器学习系统,主要针对多变量时间序列数据的异常检测。TODS提供了详尽的用于构建基于机器学习的异常检测系统的模块,它们包括:数据处理(data processing)、时间序列处理(time series processing)、特征分析(feature analysis)、检测算法(detection algorithms)和强化模块(reinforcement module)。这些模块所提供的功能包括常见的数据预处理、时间序列数据的平滑或变换、从时域或频域中抽取特征,以及多种多样的检测算法。