You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

matmul_info.h 3.5 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596
  1. /**
  2. * Copyright 2019 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #ifndef MINDSPORE_CCSRC_PARALLEL_OPS_INFO_MATMUL_INFO_H_
  17. #define MINDSPORE_CCSRC_PARALLEL_OPS_INFO_MATMUL_INFO_H_
  18. #include <memory>
  19. #include <string>
  20. #include <unordered_map>
  21. #include <vector>
  22. #include "common/utils.h"
  23. #include "ir/value.h"
  24. #include "parallel/auto_parallel/operator_costmodel.h"
  25. #include "parallel/ops_info/operator_info.h"
  26. #include "parallel/strategy.h"
  27. namespace mindspore {
  28. namespace parallel {
// Base class for MatMul-family operators in the auto-parallel framework.
// Holds the transpose attributes and matrix dimensions shared by MatMul and
// BatchMatMul, and implements the tensor-layout / communication inference
// common to both.
class MatMulBase : public OperatorInfo {
 public:
  // name: operator instance name; inputs_shape / outputs_shape: full
  // (unsliced) tensor shapes; attrs: primitive attributes (expected to carry
  // transpose_a / transpose_b — see GetAttrs()).
  // A MatMulCost model is installed for auto-parallel cost estimation.
  MatMulBase(const std::string &name, const Shapes &inputs_shape, const Shapes &outputs_shape,
             const PrimitiveAttrs &attrs)
      : OperatorInfo(name, inputs_shape, outputs_shape, attrs, std::make_shared<MatMulCost>(true)) {}
  ~MatMulBase() override = default;

  // Initialize this operator with the given sharding strategy.
  Status Init(const StrategyPtr &strategy) override;
  // Lighter-weight init used when only cost-model information is needed.
  Status InitForCostModel(const StrategyPtr &strategy) override;

  // Generate all strategies and the corresponding cost for this MatMul operator
  Status GenerateStrategies(int32_t stage_id) override;
  // Record the cost of running this operator under 'strategy'.
  Status SetCostUnderStrategy(const StrategyPtr &strategy) override;
  // Build a concrete strategy (*sp) from a candidate device partition.
  // combined_partitions: partition counts for the combined input dimensions;
  // input0_shape_size / input1_shape_size: ranks of the two matrix inputs.
  Status PrepareStrategy(int32_t stage_id, size_t dev_num, Dimensions combined_partitions, size_t input0_shape_size,
                         size_t input1_shape_size, StrategyPtr *sp);
  // Swap the last two elements of 'shape' (used to undo/apply a transpose).
  Status SwapLastTwoElements(Shape *shape);

 protected:
  // Infer the mirror (gradient-synchronization) operators for the inputs.
  Status InferMirrorOps() override;
  // Infer the forward communication (e.g. AllReduce over the contracted dim).
  Status InferForwardCommunication() override;
  Status InferTensorInfo() override;
  // Derive the device-matrix shape from the chosen strategy.
  Status InferDevMatrixShape() override;
  Status InferTensorMap() override;
  // Fill per-input/per-output tensor layouts; not part of OperatorInfo's
  // virtual interface.
  Status InferTensorLayout(TensorLayouts *inputs_layout, TensorLayouts *outputs_layout);
  // Populate tensor info used only for cost-model evaluation.
  void InitTensorInfoForCost(std::vector<TensorInfo> *);
  // Validate that the computed tensor slices are consistent.
  Status CheckForTensorSliceValid() const;
  // Read transpose_a / transpose_b (and related) from the primitive attrs.
  Status GetAttrs() override;

  bool transpose_a_ = false;              // whether input A is transposed
  bool transpose_b_ = false;              // whether input B is transposed
  bool forward_reduce_scatter_ = false;   // use ReduceScatter instead of AllReduce in forward
  size_t mat_a_dimension_ = 0;            // rank of matrix A
  size_t mat_b_dimension_ = 0;            // rank of matrix B
};
// 2-D (and batched, via subclass) matrix-multiply operator info.
// Adds strategy validation on top of MatMulBase.
class MatMul : public MatMulBase {
 public:
  MatMul(const std::string &name, const Shapes &inputs_shape, const Shapes &outputs_shape, const PrimitiveAttrs &attrs)
      : MatMulBase(name, inputs_shape, outputs_shape, attrs) {}
  ~MatMul() override = default;

 protected:
  // Reject sharding strategies that are invalid for matrix multiplication.
  Status CheckStrategy(const StrategyPtr &strategy) override;
};
// Operator info registered for the MatMul primitive; behavior is fully
// inherited from MatMul.
class MatMulInfo : public MatMul {
 public:
  MatMulInfo(const std::string &name, const Shapes &inputs_shape, const Shapes &outputs_shape,
             const PrimitiveAttrs &attrs)
      : MatMul(name, inputs_shape, outputs_shape, attrs) {}
  ~MatMulInfo() override = default;
};
// Operator info registered for the BatchMatMul primitive; behavior is fully
// inherited from MatMul (the shared logic handles the extra batch dims).
class BatchMatMulInfo : public MatMul {
 public:
  BatchMatMulInfo(const std::string &name, const Shapes &inputs_shape, const Shapes &outputs_shape,
                  const PrimitiveAttrs &attrs)
      : MatMul(name, inputs_shape, outputs_shape, attrs) {}
  ~BatchMatMulInfo() override = default;
};
  81. } // namespace parallel
  82. } // namespace mindspore
  83. #endif // MINDSPORE_CCSRC_PARALLEL_OPS_INFO_MATMUL_INFO_H_