
allreduce_graph.h 3.6 kB

/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_CCSRC_PARALLEL_ALLREDUCE_FUSION_ALLREDUCE_GRAPH_H_
#define MINDSPORE_CCSRC_PARALLEL_ALLREDUCE_FUSION_ALLREDUCE_GRAPH_H_

#include <memory>
#include <set>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>

#include "ir/anf.h"
#include "parallel/allreduce_fusion/allreduce_node.h"
#include "parallel/status.h"

namespace mindspore {
namespace parallel {
class AllreduceGraph {
 public:
  AllreduceGraph()
      : head_cnode_(nullptr),
        arnode_set_(),
        arnode_vec_(),
        cnode_set_(),
        para_cnode_map_(),
        para_cnodeset_map_(),
        cnode_arnode_map_(),
        cnode_paraset_map_(),
        max_(0) {}
  virtual ~AllreduceGraph() = default;
  Status AddNode(const CNodePtr &node, const AnfNodePtr &para);
  Status AddEdge(const CNodePtr &from, const CNodePtr &to, double dist);
  bool NodeInGraph(const CNodePtr &node) const;
  std::vector<AnfNodePtr> GetParaByCost(double from, double to);
  // Find the first several AllreduceNodes whose depend_feat_size is less than `to` and the sum of whose parameter
  // sizes exceeds para_size.
  // Return the parameter AnfNodePtr vector corresponding to these AllreduceNodes and the smallest depend_feat_size.
  // If the sum of the remaining AllreduceNodes' parameter sizes is less than para_size, the returned
  // depend_feat_size must be 0.
  std::pair<std::vector<AnfNodePtr>, double> GetParaByParaSize(double to, double para_size);
  // If one parameter is used by multiple AllreduceNodes, the parameter belonging to the last node in the backward
  // computation is kept by the corresponding AllreduceNode; the parameters belonging to the other AllreduceNodes
  // are removed.
  // Called during precise optimization; not implemented yet.
  void SortArnode();
  Status RemoveExtraParas();
  void PrintCNodeSet() const;
  void PrintAllredueGraphInfo() const;
  void PrintArnodeVec() const;
  void PrintArnodeSet() const;
  const std::unordered_set<CNodePtr> &cnode_set() const { return cnode_set_; }
  CNodePtr head_cnode() const { return head_cnode_; }
  Status set_head_cnode(const CNodePtr &node);
  double max() const { return max_; }

 private:
  CNodePtr head_cnode_;
  std::set<AllreduceNodePtr> arnode_set_;
  std::vector<AllreduceNode> arnode_vec_;
  std::unordered_set<CNodePtr> cnode_set_;
  // If one ParameterPtr is used by multiple CNodes, the last node in the backward computation is saved.
  std::unordered_map<AnfNodePtr, std::vector<CNodePtr>> para_cnode_map_;
  // One ParameterPtr may be used by multiple CNodes.
  std::unordered_map<AnfNodePtr, std::unordered_set<CNodePtr>> para_cnodeset_map_;
  // Multiple Parameters may be inputs to the same CNode.
  std::unordered_map<CNodePtr, std::unordered_set<AnfNodePtr>> cnode_paraset_map_;
  std::unordered_map<CNodePtr, AllreduceNodePtr> cnode_arnode_map_;
  double max_;
};
}  // namespace parallel
}  // namespace mindspore
#endif  // MINDSPORE_CCSRC_PARALLEL_ALLREDUCE_FUSION_ALLREDUCE_GRAPH_H_
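
For illustration, the following is a minimal, self-contained sketch of the selection rule that the GetParaByParaSize comment describes: scan AllreduceNodes in descending depend_feat_size order, skip those at or above `to`, and accumulate parameters until their total size exceeds `para_size`. The ArNode struct, its fields, and the parameter names and sizes below are hypothetical stand-ins for MindSpore's AllreduceNode and AnfNodePtr, not the actual implementation.

#include <algorithm>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Hypothetical stand-in for an AllreduceNode, reduced to the fields the
// GetParaByParaSize contract refers to. Not part of the real API.
struct ArNode {
  std::string para;         // stands in for the parameter AnfNodePtr
  double depend_feat_size;  // feature size this allreduce still depends on
  double para_size;         // size of the parameter it reduces
};

// Simplified model of the documented contract: return the chosen parameters
// together with the smallest depend_feat_size taken; if the remaining nodes
// cannot reach para_size, return 0 for that size, as the comment specifies.
std::pair<std::vector<std::string>, double> GetParaByParaSize(
    std::vector<ArNode> nodes, double to, double para_size) {
  // Mirrors what SortArnode would provide: descending depend_feat_size.
  std::sort(nodes.begin(), nodes.end(), [](const ArNode &a, const ArNode &b) {
    return a.depend_feat_size > b.depend_feat_size;
  });
  std::vector<std::string> paras;
  double cur_para_size = 0.0;
  double from = to;
  for (const auto &node : nodes) {
    if (node.depend_feat_size >= to) continue;   // outside the (0, to) window
    if (cur_para_size >= para_size) break;       // accumulated enough parameters
    paras.push_back(node.para);
    cur_para_size += node.para_size;
    from = node.depend_feat_size;
  }
  if (cur_para_size < para_size) return {paras, 0.0};
  return {paras, from};
}

int main() {
  // Hypothetical gradients with decreasing dependent feature sizes.
  std::vector<ArNode> nodes = {{"fc1.weight", 30.0, 4.0},
                               {"fc2.weight", 20.0, 2.0},
                               {"fc3.weight", 10.0, 1.0}};
  auto [paras, feat] = GetParaByParaSize(nodes, 25.0, 1.5);
  for (const auto &p : paras) std::cout << p << '\n';        // fc2.weight
  std::cout << "smallest depend_feat_size: " << feat << '\n';  // 20
}

With to = 25 and para_size = 1.5, fc1.weight is skipped (its depend_feat_size of 30 is not below 25), fc2.weight alone pushes the accumulated size past 1.5, and the loop stops before fc3.weight, returning 20 as the smallest depend_feat_size taken.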