You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

reduce_eliminate.h 4.8 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #ifndef MINDSPORE_CCSRC_OPTIMIZER_IRPASS_REDUCE_ELIMINATE_H_
  17. #define MINDSPORE_CCSRC_OPTIMIZER_IRPASS_REDUCE_ELIMINATE_H_
  18. #include <vector>
  19. #include <algorithm>
  20. #include <memory>
  21. #include "optimizer/irpass.h"
  22. #include "optimizer/optimizer.h"
  23. #include "ir/visitor.h"
  24. #include "operator/ops.h"
  25. #include "pipeline/static_analysis/dshape.h"
  26. namespace mindspore {
  27. namespace opt {
  28. namespace irpass {
  29. using abstract::Shape;
  30. using abstract::ShapePtr;
  31. // {ReduceLike, X, axis}
  32. class ReduceOneEliminater : public AnfVisitor {
  33. public:
  34. AnfNodePtr operator()(const OptimizerPtr &, const AnfNodePtr &node) override {
  35. Reset();
  36. PrimitivePtr prim;
  37. if (IsPrimitiveCNode(node, prim::kPrimReduceMean) || IsPrimitiveCNode(node, prim::kPrimReduceAll) ||
  38. IsPrimitiveCNode(node, prim::kPrimReduceSum) || IsPrimitiveCNode(node, prim::kPrimReduceMax) ||
  39. IsPrimitiveCNode(node, prim::kPrimReduceMin)) {
  40. prim = GetValueNode<PrimitivePtr>(node->cast<CNodePtr>()->input(0));
  41. AnfVisitor::Match(prim, {IsNode, IsVNode})(node);
  42. if (!is_axis_one_) {
  43. return nullptr;
  44. }
  45. // consider keep_dims
  46. auto keep_dims = prim->GetAttr("keep_dims");
  47. auto is_keep_dims = GetValue<bool>(keep_dims);
  48. // {_Reduce, X, axis} -> X
  49. if (is_keep_dims) {
  50. return x_;
  51. }
  52. // {_Reduce, Tensor}
  53. if (is_tensor_) {
  54. return nullptr;
  55. }
  56. // {_Reduce, X, axis} -> {Reshape, X, new_shape}
  57. std::vector<ValuePtr> elements;
  58. for (size_t i = 0; i < x_shape_.size(); i++) {
  59. auto iter = find(axis_.begin(), axis_.end(), i);
  60. if (iter == axis_.end()) {
  61. ValuePtr s = MakeValue(x_shape_[i]);
  62. elements.push_back(s);
  63. }
  64. }
  65. auto new_shape = std::make_shared<ValueTuple>(elements);
  66. auto reshape_op = prim::GetPythonOps("reshape", "mindspore.ops.functional")->cast<PrimitivePtr>();
  67. return node->func_graph()->NewCNode({NewValueNode(reshape_op), x_, NewValueNode(new_shape)});
  68. }
  69. return nullptr;
  70. }
  71. void Visit(const AnfNodePtr &node) override {
  72. if (x_ == nullptr) {
  73. if (IsValueNode<tensor::Tensor>(node)) {
  74. is_tensor_ = true;
  75. }
  76. // get X's shape
  77. auto x_shape_abs = node->abstract();
  78. if (x_shape_abs != nullptr) {
  79. auto x_track = x_shape_abs->GetShapeTrack()->cast<ShapePtr>();
  80. if (x_track == nullptr) {
  81. return;
  82. }
  83. auto x_shape = x_track->shape();
  84. (void)std::copy(x_shape.begin(), x_shape.end(), std::back_inserter(x_shape_));
  85. x_ = node;
  86. }
  87. return;
  88. }
  89. // check axis
  90. AnfVisitor::Visit(node);
  91. }
  92. void Visit(const ValueNodePtr &vnode) override {
  93. if (x_shape_.empty()) {
  94. return;
  95. }
  96. // axis : int
  97. if (IsValueNode<Int32Imm>(vnode)) {
  98. auto idx = GetValue<int>(vnode->value());
  99. // axis could be negative
  100. if (idx < 0) {
  101. idx += SizeToInt(x_shape_.size());
  102. }
  103. if (SizeToInt(x_shape_.size()) > idx && x_shape_[IntToSize(idx)] == 1) {
  104. is_axis_one_ = true;
  105. axis_.push_back(idx);
  106. }
  107. return;
  108. }
  109. // axis : tuple(int), default ()
  110. if (IsValueNode<ValueTuple>(vnode)) {
  111. auto axis = GetValue<std::vector<int>>(vnode->value());
  112. if (axis.empty()) {
  113. return;
  114. }
  115. auto cmp = std::all_of(axis.cbegin(), axis.cend(), [this](int idx) {
  116. // axis could be negative
  117. if (idx < 0) {
  118. idx += SizeToInt(x_shape_.size());
  119. }
  120. return SizeToInt(this->x_shape_.size()) > idx && this->x_shape_[IntToSize(idx)] == 1;
  121. });
  122. if (cmp) {
  123. is_axis_one_ = true;
  124. (void)std::copy(axis.begin(), axis.end(), std::back_inserter(axis_));
  125. }
  126. }
  127. }
  128. void Reset() {
  129. axis_.clear();
  130. x_shape_.clear();
  131. x_ = nullptr;
  132. is_axis_one_ = false;
  133. is_tensor_ = false;
  134. }
  135. private:
  136. bool is_axis_one_{false}, is_tensor_{false};
  137. std::vector<int> axis_{}, x_shape_{};
  138. AnfNodePtr x_{nullptr};
  139. };
  140. } // namespace irpass
  141. } // namespace opt
  142. } // namespace mindspore
  143. #endif // MINDSPORE_CCSRC_OPTIMIZER_IRPASS_REDUCE_ELIMINATE_H_