You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

reduce_eliminate.h 5.3 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169
  1. /**
  2. * Copyright 2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #ifndef MINDSPORE_CCSRC_FRONTEND_OPTIMIZER_IRPASS_REDUCE_ELIMINATE_H_
  17. #define MINDSPORE_CCSRC_FRONTEND_OPTIMIZER_IRPASS_REDUCE_ELIMINATE_H_
  18. #include <vector>
  19. #include <algorithm>
  20. #include <memory>
  21. #include "frontend/optimizer/irpass.h"
  22. #include "frontend/optimizer/optimizer.h"
  23. #include "frontend/optimizer/anf_visitor.h"
  24. #include "frontend/operator/ops.h"
  25. #include "abstract/dshape.h"
  26. namespace mindspore {
  27. namespace opt {
  28. namespace irpass {
  29. using abstract::Shape;
  30. using abstract::ShapePtr;
  31. // {ReduceLike, X, axis}
  32. class ReduceOneEliminater : public AnfVisitor {
  33. public:
  34. AnfNodePtr operator()(const OptimizerPtr &, const AnfNodePtr &node) override {
  35. Reset();
  36. PrimitivePtr prim;
  37. if (IsPrimitiveCNode(node, prim::kPrimReduceMean) || IsPrimitiveCNode(node, prim::kPrimReduceAll) ||
  38. IsPrimitiveCNode(node, prim::kPrimReduceSum) || IsPrimitiveCNode(node, prim::kPrimReduceMax) ||
  39. IsPrimitiveCNode(node, prim::kPrimReduceMin)) {
  40. prim = GetValueNode<PrimitivePtr>(node->cast<CNodePtr>()->input(0));
  41. AnfVisitor::Match(prim, {IsNode, IsVNode})(node);
  42. if (!is_axis_one_) {
  43. return nullptr;
  44. }
  45. // consider keep_dims
  46. auto keep_dims = prim->GetAttr("keep_dims");
  47. auto is_keep_dims = GetValue<bool>(keep_dims);
  48. // {_Reduce, X, axis} -> X
  49. if (is_keep_dims) {
  50. return x_;
  51. }
  52. // {_Reduce, Tensor}
  53. if (is_tensor_) {
  54. return nullptr;
  55. }
  56. // {_Reduce, X, axis} -> {Reshape, X, new_shape}
  57. std::vector<ValuePtr> elements;
  58. for (size_t i = 0; i < x_shape_.size(); i++) {
  59. auto iter = find(axis_.begin(), axis_.end(), i);
  60. if (iter == axis_.end()) {
  61. ValuePtr s = MakeValue(x_shape_[i]);
  62. elements.push_back(s);
  63. }
  64. }
  65. auto new_shape = std::make_shared<ValueTuple>(elements);
  66. auto reshape_op = prim::GetPythonOps("reshape", "mindspore.ops.functional")->cast<PrimitivePtr>();
  67. auto node_abstract = node->abstract();
  68. // handle auto_parallel get nullptr abstract
  69. if (node_abstract != nullptr) {
  70. auto new_base_shape = std::make_shared<abstract::Shape>(GetValue<std::vector<int>>(new_shape));
  71. node_abstract->set_shape(new_base_shape);
  72. auto new_node = node->func_graph()->NewCNode({NewValueNode(reshape_op), x_, NewValueNode(new_shape)});
  73. new_node->set_abstract(node_abstract);
  74. return new_node;
  75. }
  76. return node->func_graph()->NewCNode({NewValueNode(reshape_op), x_, NewValueNode(new_shape)});
  77. }
  78. return nullptr;
  79. }
  80. void Visit(const AnfNodePtr &node) override {
  81. if (!IsVNode(node) && x_ == nullptr) {
  82. if (IsValueNode<tensor::Tensor>(node)) {
  83. is_tensor_ = true;
  84. }
  85. // get X's shape
  86. auto x_shape_abs = node->abstract();
  87. if (x_shape_abs != nullptr) {
  88. auto x_track = x_shape_abs->GetShapeTrack()->cast<ShapePtr>();
  89. if (x_track == nullptr) {
  90. return;
  91. }
  92. auto x_shape = x_track->shape();
  93. (void)std::copy(x_shape.begin(), x_shape.end(), std::back_inserter(x_shape_));
  94. x_ = node;
  95. }
  96. return;
  97. }
  98. // check axis
  99. AnfVisitor::Visit(node);
  100. }
  101. void Visit(const ValueNodePtr &vnode) override {
  102. if (x_shape_.empty()) {
  103. return;
  104. }
  105. // axis : int
  106. if (IsValueNode<Int32Imm>(vnode)) {
  107. auto idx = GetValue<int>(vnode->value());
  108. // axis could be negative
  109. if (idx < 0) {
  110. idx += SizeToInt(x_shape_.size());
  111. }
  112. if (SizeToInt(x_shape_.size()) > idx && x_shape_[IntToSize(idx)] == 1) {
  113. is_axis_one_ = true;
  114. axis_.push_back(idx);
  115. }
  116. return;
  117. }
  118. // axis : tuple(int), default ()
  119. if (IsValueNode<ValueTuple>(vnode)) {
  120. auto axis = GetValue<std::vector<int>>(vnode->value());
  121. if (axis.empty()) {
  122. return;
  123. }
  124. auto cmp = std::all_of(axis.cbegin(), axis.cend(), [this](int idx) {
  125. // axis could be negative
  126. if (idx < 0) {
  127. idx += SizeToInt(x_shape_.size());
  128. }
  129. return SizeToInt(this->x_shape_.size()) > idx && this->x_shape_[IntToSize(idx)] == 1;
  130. });
  131. if (cmp) {
  132. is_axis_one_ = true;
  133. (void)std::copy(axis.begin(), axis.end(), std::back_inserter(axis_));
  134. }
  135. }
  136. }
  137. void Reset() {
  138. axis_.clear();
  139. x_shape_.clear();
  140. x_ = nullptr;
  141. is_axis_one_ = false;
  142. is_tensor_ = false;
  143. }
  144. private:
  145. bool is_axis_one_{false}, is_tensor_{false};
  146. std::vector<int> axis_{}, x_shape_{};
  147. AnfNodePtr x_{nullptr};
  148. };
  149. } // namespace irpass
  150. } // namespace opt
  151. } // namespace mindspore
  152. #endif // MINDSPORE_CCSRC_FRONTEND_OPTIMIZER_IRPASS_REDUCE_ELIMINATE_H_