
context.cc

/**
 * Copyright 2019 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "parallel/context.h"

#include <algorithm>
#include <cstdint>
#include <functional>
#include <map>
#include <memory>
#include <numeric>
#include <utility>

#include "common/utils.h"
#include "parallel/device_manager.h"
namespace mindspore {
namespace parallel {
static std::map<std::string, std::vector<int>> param_shapes;

std::vector<std::string> PARALLEL_MODE_LIST = {STAND_ALONE, DATA_PARALLEL, HYBRID_PARALLEL, SEMI_AUTO_PARALLEL,
                                               AUTO_PARALLEL};
std::vector<std::string> STRATEGY_SEARCH_MODE_LIST = {DYNAMIC_PROGRAMMING, RECURSIVE_PROGRAMMING};

std::shared_ptr<ParallelContext> ParallelContext::inst_context_ = nullptr;

std::shared_ptr<ParallelContext> ParallelContext::GetInstance() {
  if (inst_context_ == nullptr) {
    inst_context_.reset(new (std::nothrow) ParallelContext());
  }
  return inst_context_;
}
ParallelContext::ParallelContext() { Reset(); }

void ParallelContext::Reset() {
  mirror_mean_ = false;
  full_batch_ = false;
  cast_before_mirror_ = true;
  loss_repeated_mean_ = true;
  device_num_ = 1;
  global_rank_ = 0;
  communication_backend_ = HCCL_BACKEND;
  device_num_is_set_ = false;
  global_rank_is_set_ = false;
  parallel_mode_ = STAND_ALONE;
  parameter_broadcast_ = false;
  parameter_broadcast_is_set_ = false;
  enable_all_reduce_fusion_ = false;
  strategy_ckpt_load_file_ = "";
  strategy_ckpt_save_file_ = "";
  enable_parallel_optimizer_ = false;
}
void ParallelContext::set_device_num(int32_t device_num) {
  device_num_ = device_num;
  device_num_is_set_ = true;
}

void ParallelContext::set_global_rank(int32_t global_rank) {
  global_rank_ = global_rank;
  global_rank_is_set_ = true;
}

void ParallelContext::set_mirror_mean(bool mirror_mean) { mirror_mean_ = mirror_mean; }

void ParallelContext::set_full_batch(bool full_batch) { full_batch_ = full_batch; }

void ParallelContext::set_cast_before_mirror(bool cast_before_mirror) { cast_before_mirror_ = cast_before_mirror; }

void ParallelContext::set_loss_repeated_mean(bool loss_repeated_mean) { loss_repeated_mean_ = loss_repeated_mean; }

void ParallelContext::set_communication_backend(const std::string &communication_backend) {
  communication_backend_ = communication_backend;
}
bool ParallelContext::set_parallel_mode(const std::string &parallel_mode) {
  auto iter = std::find(PARALLEL_MODE_LIST.begin(), PARALLEL_MODE_LIST.end(), parallel_mode);
  if (iter == PARALLEL_MODE_LIST.end()) {
    MS_LOG(INFO) << "Invalid parallel mode: " << parallel_mode;
    return false;
  }
  parallel_mode_ = parallel_mode;
  return true;
}
bool ParallelContext::set_strategy_search_mode(const std::string &strategy_search_mode) {
  auto iter = std::find(STRATEGY_SEARCH_MODE_LIST.begin(), STRATEGY_SEARCH_MODE_LIST.end(), strategy_search_mode);
  if (iter == STRATEGY_SEARCH_MODE_LIST.end()) {
    MS_LOG(INFO) << "Invalid strategy search mode: " << strategy_search_mode;
    return false;
  }
  strategy_search_mode_ = strategy_search_mode;
  return true;
}
void ParallelContext::set_parameter_broadcast(bool parameter_broadcast) {
  parameter_broadcast_ = parameter_broadcast;
  parameter_broadcast_is_set_ = true;
}

void ParallelContext::set_strategy_ckpt_load_file(const std::string &strategy_ckpt_load_file) {
  strategy_ckpt_load_file_ = strategy_ckpt_load_file;
}

void ParallelContext::set_strategy_ckpt_save_file(const std::string &strategy_ckpt_save_file) {
  strategy_ckpt_save_file_ = strategy_ckpt_save_file;
}

void ParallelContext::SetAllReduceFusionSplitIndices(const std::vector<uint32_t> indices, const std::string &group) {
  all_reduce_fusion_split_indices_[group] = indices;
}

const std::vector<uint32_t> ParallelContext::GetAllReduceFusionSplitIndices(const std::string &group) const {
  auto iter = all_reduce_fusion_split_indices_.find(group);
  if (iter != all_reduce_fusion_split_indices_.end()) {
    return iter->second;
  }
  return {};
}

void ParallelContext::SetAllReduceFusionSplitSizes(const std::vector<uint32_t> sizes, const std::string &group) {
  all_reduce_fusion_split_sizes_[group] = sizes;
}

const std::vector<uint32_t> ParallelContext::GetAllReduceFusionSplitSizes(const std::string &group) const {
  auto iter = all_reduce_fusion_split_sizes_.find(group);
  if (iter != all_reduce_fusion_split_sizes_.end()) {
    return iter->second;
  }
  return {};
}
// Clear param_shapes before training in auto-parallel or semi-auto-parallel mode
void ParallelParameterContextInit(const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(func_graph);
  if (!func_graph->has_flag(AUTO_PARALLEL) || !func_graph->has_flag(TRAINING)) {
    return;
  }
  param_shapes.clear();
}
// Restore the parameters' shape for evaluation/prediction in auto-parallel or semi-auto-parallel mode
void ParallelParameterContextRestoreInNoTraining(const FuncGraphPtr &func_graph, const ParameterPtr &param_node,
                                                 AbstractBasePtr ptr) {
  MS_EXCEPTION_IF_NULL(func_graph);
  MS_EXCEPTION_IF_NULL(param_node);
  MS_EXCEPTION_IF_NULL(ptr);
  if (!func_graph->has_flag(AUTO_PARALLEL) || (func_graph->attrs().count(TRAINING) == 0) ||
      func_graph->has_flag(TRAINING)) {
    return;
  }

  auto iter = param_shapes.find(param_node->name());
  if (iter == param_shapes.end()) {
    MS_LOG(WARNING) << "Cannot find the shape for parameter " << param_node->name();
    return;
  }
  std::vector<int> shape = iter->second;
  std::shared_ptr<abstract::BaseShape> base_shape = std::make_shared<abstract::Shape>(shape);
  ptr->set_shape(base_shape);
  MS_LOG(DEBUG) << "The parameter name is " << param_node->name() << ", the shape is " << shape;
}
// Checkpoint the parameters' shape for training in auto-parallel or semi-auto-parallel mode
void ParallelParameterContextCkptInTraining(const FuncGraphPtr &func_graph, const ParameterPtr &param_node,
                                            const AbstractBasePtr &ptr) {
  MS_EXCEPTION_IF_NULL(func_graph);
  MS_EXCEPTION_IF_NULL(param_node);
  MS_EXCEPTION_IF_NULL(ptr);
  if (!func_graph->has_flag(AUTO_PARALLEL) || !func_graph->has_flag(TRAINING)) {
    return;
  }

  std::vector<int> shape = dyn_cast<abstract::Shape>(ptr->GetShapeTrack())->shape();
  auto ret = param_shapes.try_emplace(param_node->name(), shape);
  if (!ret.second) {
    MS_LOG(EXCEPTION) << "The shape for parameter " << param_node->name() << " already exists";
    return;
  }
  MS_LOG(DEBUG) << "The parameter name is " << param_node->name() << ", the shape is " << shape;
}
}  // namespace parallel
}  // namespace mindspore
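
// A minimal usage sketch of the ParallelContext singleton defined above. It is illustrative only: the
// ConfigureDataParallel helper and the "hccl_world_group" group name are assumptions made for the example,
// not part of this file. A caller would include parallel/context.h and link against this translation unit.
//
//   #include "parallel/context.h"
//
//   void ConfigureDataParallel() {
//     auto ctx = mindspore::parallel::ParallelContext::GetInstance();
//     ctx->set_device_num(8);   // total number of devices participating in the job
//     ctx->set_global_rank(0);  // rank of the current process
//     if (!ctx->set_parallel_mode(mindspore::parallel::DATA_PARALLEL)) {
//       // set_parallel_mode returns false for any mode not listed in PARALLEL_MODE_LIST
//     }
//     // Fusion split indices are stored per communication group (group name here is illustrative)
//     ctx->SetAllReduceFusionSplitIndices({20, 35}, "hccl_world_group");
//   }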