You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

trans.h 3.0 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #ifndef MINDSPORE_CCSRC_COMMON_TRANS_H
  17. #define MINDSPORE_CCSRC_COMMON_TRANS_H
  18. #include <algorithm>
  19. #include <functional>
  20. #include <map>
  21. #include <memory>
  22. #include <string>
  23. #include <utility>
  24. #include <vector>
  25. #include "ir/dtype.h"
  26. #include "kernel/kernel.h"
  27. #include "ir/dtype/type.h"
  28. namespace mindspore {
  29. namespace trans {
  30. struct TypeIdArgs {
  31. const void *data;
  32. size_t host_shape_size; // Multiply each dimension elements. [a, b, c, d] => a*b*c*d
  33. TypeId host_data_type;
  34. TypeId device_data_type;
  35. size_t data_size;
  36. };
  37. struct FormatArgs {
  38. const void *data;
  39. const size_t device_size;
  40. std::string host_format;
  41. std::string device_format;
  42. std::vector<size_t> host_shape;
  43. std::vector<size_t> device_shape;
  44. TypeId src_data_type;
  45. };
  46. size_t TypeIdSize(const TypeId data_type);
  47. size_t ShapeSize(const std::vector<size_t> &shape);
  48. size_t CubeSizeByType(const TypeId data_type);
  49. std::vector<size_t> PaddingShapeTo4d(const std::vector<size_t> &shape,
  50. const std::vector<kernel::Axis> &padding_axis = {});
  51. std::vector<int> GetRuntimePaddingShape(const AnfNodePtr &node, size_t index);
  52. bool IsNeedPadding(const std::string &format, const size_t shape_size);
  53. std::vector<size_t> TransShapeToDevice(const std::vector<size_t> &shape, const std::string &format);
  54. bool TransDataType(const TypeIdArgs &args, void *result);
  55. bool TransFormat(const FormatArgs &args, void *result);
  56. bool TransFormatFromDeviceToHost(const FormatArgs &args, void *result);
  57. // host to device
  58. bool NchwTo4D(const FormatArgs &args, void *result);
  59. bool NchwToFracZ(const FormatArgs &args, void *result);
  60. bool NchwToFracNz(const FormatArgs &args, void *result);
  61. bool NchwToNc1hwc0(const FormatArgs &args, void *result);
  62. bool NchwToFracZc04(const FormatArgs &args, void *result);
  63. bool NchwToNc1hwc04(const FormatArgs &args, void *result);
  64. bool NchwToC1hwncoc0(const FormatArgs &args, void *result);
  65. // device to host
  66. bool ToNchw(const FormatArgs &args, void *result);
  67. bool FracZToNchw(const FormatArgs &args, void *result);
  68. bool FracNzToNchw(const FormatArgs &args, void *result);
  69. bool Nc1hwc0ToNchw(const FormatArgs &args, void *result);
  70. bool Nc1hwc04ToNchw(const FormatArgs &args, void *result);
  71. bool C1hwncoc0ToNchw(const FormatArgs &args, void *result);
  72. } // namespace trans
  73. } // namespace mindspore
  74. #endif // MINDSPORE_CCSRC_COMMON_TRANS_H