
step_parallel.cc 100 kB

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "frontend/parallel/step_parallel.h"
#include <inttypes.h>
#include <sys/time.h>
#include <algorithm>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <unordered_map>
#include <utility>
#include "ir/tensor.h"
#include "ir/param_value.h"
#include "frontend/operator/ops.h"
#include "frontend/optimizer/optimizer.h"
#include "frontend/parallel/auto_parallel/graph_costmodel.h"
#include "frontend/parallel/context.h"
#include "frontend/parallel/device_manager.h"
#include "frontend/parallel/dynamic_creator.h"
#include "frontend/parallel/graph_util/generate_graph.h"
#include "frontend/parallel/graph_util/graph_info.h"
#include "frontend/parallel/graph_util/node_info.h"
#include "frontend/parallel/node_check.h"
#include "frontend/parallel/ops_info/matmul_info.h"
#include "frontend/parallel/strategy_checkpoint/parallel_strategy_checkpoint.h"
#include "utils/comm_manager.h"
#include "utils/symbolic.h"
#include "pipeline/jit/static_analysis/prim.h"

using mindspore::tensor::Tensor;

namespace mindspore {
namespace parallel {
static const std::set<std::string> COMMUNICATION_OPS = {ALL_REDUCE, ALL_GATHER, ALL_TO_ALL, REDUCE_SCATTER};
static const std::set<std::string> INVALID_LOSS_OPS = {GET_NEXT, VIRTUALLOSS};
// g_RefMap: if input i of CNode B is a RefKey[Parameter C], the map holds one
// entry with key C and value (B, i)
static std::map<AnfNodePtr, std::pair<AnfNodePtr, int>> g_RefMap;
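
// If the communication primitive in new_node_input carries a 'group' attribute (a hashed
// group name), look up the corresponding rank list in the device manager and record it on
// the primitive as the 'group_ranks' attribute so the group is human-readable.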
void SetCommunicationOpGroupLabel(std::vector<AnfNodePtr> new_node_input) {
  if (new_node_input.empty()) {
    return;
  }
  ValueNodePtr prim_anf_node = new_node_input[0]->cast<ValueNodePtr>();
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
  MS_EXCEPTION_IF_NULL(prim);
  auto attrs = prim->attrs();
  auto iter = attrs.find(GROUP);
  if (iter != attrs.end()) {
    auto value = iter->second;
    MS_EXCEPTION_IF_NULL(value);
    if (value->isa<StringImm>()) {
      std::string hash_name = value->cast<StringImmPtr>()->value();
      MS_EXCEPTION_IF_NULL(g_device_manager);
      std::string rank_list_name = g_device_manager->FindRankListNameByHashName(hash_name);
      (void)prim->AddAttr(GROUP_RANKS, MakeValue(rank_list_name));
    }
  }
}

std::vector<AnfNodePtr> CreateInput(const Operator &op, const AnfNodePtr &node, const std::string &instance_name) {
  MS_EXCEPTION_IF_NULL(node);
  OperatorArgs arg_forward = op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_forward.first, op.first, instance_name);
  MS_EXCEPTION_IF_NULL(pyop_instance);
  OperatorParams params = arg_forward.second;
  std::vector<AnfNodePtr> new_node_input = {NewValueNode(pyop_instance), node};
  if (!params.empty()) {
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      MS_EXCEPTION_IF_NULL(val);
      int32_t position = param.second;
      (void)new_node_input.insert(new_node_input.begin() + position, val);
    }
  }
  // if the op has a 'group' attr, set the rank list name for the op
  SetCommunicationOpGroupLabel(new_node_input);
  return new_node_input;
}
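
// Build a new CNode from `op` that consumes `pre_node`, then splice it in as input
// `index` of `node`, propagating the scope and (outside of sens-splitting) the forward flag.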
void InsertNode(const Operator &op, const CNodePtr &node, size_t index, const AnfNodePtr &pre_node,
                const FuncGraphPtr &func_graph, const std::string &instance_name) {
  // insert the new node before the node
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  std::vector<AnfNodePtr> node_input = CreateInput(op, pre_node, instance_name);
  CNodePtr new_node = func_graph->NewCNode(node_input);
  MS_EXCEPTION_IF_NULL(new_node);
  if (instance_name.find(SPLIT_SENS) == std::string::npos) {
    new_node->set_in_forward_flag(true);  // mark the forward flag
  }
  auto new_node_value = node_input[0]->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(new_node_value);
  PrimitivePtr new_node_prim = new_node_value->value()->cast<PrimitivePtr>();
  new_node_prim->set_instance_name(instance_name);
  new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
  new_node->set_scope(scope);
  node_input[0]->set_scope(scope);
  manager->SetEdge(node, SizeToInt(index), new_node);
}

std::string CreateInstanceName(const CNodePtr &node, size_t index) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsValueNode<Primitive>(node->input(0))) {
    MS_LOG(EXCEPTION) << "CreateInstanceName: " << node->ToString() << " doesn't have a primitive";
  }
  std::string name_base = node->fullname_with_scope();
  std::string name = name_base + "_" + std::to_string(index);
  std::string instance_name = HashInstanceName(name);
  return instance_name;
}
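
// Insert the forward communication operators (e.g. AllReduce) after the output of `node`.
// If the node's single user is a tuple_getitem, insert after that getitem instead;
// multiple outputs are not supported yet.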
void ForwardCommunication(OperatorVector forward_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  // step 1: get the graph manager
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto uses_set = manager->node_users()[node];
  CNodePtr node_to_insert = node;
  for (auto &uses_pair : uses_set) {
    auto uses_cnode = uses_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(uses_cnode);
    if (!IsValueNode<Primitive>(uses_cnode->input(0))) {
      break;
    }
    PrimitivePtr value_node_prim = GetValueNode<PrimitivePtr>(uses_cnode->input(0));
    MS_EXCEPTION_IF_NULL(value_node_prim);
    if (value_node_prim->name() == TUPLE_GETITEM) {
      if (uses_set.size() > 1) {
        MS_LOG(EXCEPTION) << "Now only support one output, but got " << uses_set.size();
      }
      node_to_insert = uses_cnode;
    }
  }
  MS_EXCEPTION_IF_NULL(node_to_insert);
  std::reverse(forward_op.begin(), forward_op.end());
  // step 2: traverse the op list and insert the nodes
  for (size_t index = 0; index < forward_op.size(); ++index) {
    std::string instance_name_base = FORWARD_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(node, index);
    std::vector<AnfNodePtr> forward_input = CreateInput(forward_op[index], node_to_insert, instance_name);
    CNodePtr forward_node = func_graph->NewCNode(forward_input);  // use NewCNode to create the anf node
    MS_EXCEPTION_IF_NULL(forward_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    forward_node->set_scope(scope);
    forward_node->set_in_forward_flag(true);
    forward_input[0]->set_scope(scope);
    (void)manager->Replace(node_to_insert, forward_node);  // use Replace to insert the node
  }
}
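
// Replace `prev` with a make_tuple that regroups its `num` outputs through
// tuple_getitem nodes, and return the new make_tuple CNode.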
CNodePtr InsertMakeTuple(const AnfNodePtr &prev, uint32_t num, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(prev);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> make_tuple_inputs;
  make_tuple_inputs.push_back(NewValueNode(prim::kPrimMakeTuple));
  for (uint32_t i = 0; i < num; i++) {
    std::vector<AnfNodePtr> tuple_get_item_inputs{NewValueNode(prim::kPrimTupleGetItem), prev,
                                                  CreatInt32Imm(UintToInt(i))};
    auto tuple_get_item = func_graph->NewCNode(tuple_get_item_inputs);
    MS_EXCEPTION_IF_NULL(tuple_get_item);
    make_tuple_inputs.push_back(tuple_get_item);
  }
  auto make_tuple = func_graph->NewCNode(make_tuple_inputs);
  MS_EXCEPTION_IF_NULL(make_tuple);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  (void)manager->Replace(prev, make_tuple);
  return make_tuple;
}
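
// Insert the inferred redistribution operators one by one on the edge feeding input
// `pos` of `node`; when the matching output-info entry requires it, regroup the
// intermediate result with a make_tuple.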
void InsertRedistribution(const RedistributionOpListPtr &redistribution_oplist_ptr, const CNodePtr &node,
                          const FuncGraphPtr &func_graph, int pos, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(pre_node);
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if ((redistribution_oplist_ptr->first).size() != (redistribution_oplist_ptr->second).size()) {
    MS_LOG(EXCEPTION) << "The sizes of OperatorVector and OutPutInfoVector must be the same!";
  }
  for (size_t index = 0; index < (redistribution_oplist_ptr->first).size(); ++index) {
    if (pos >= SizeToInt(node->inputs().size())) {
      MS_LOG(EXCEPTION) << "InsertRedistribution: pos can't be larger than the size of node's inputs";
    }
    // create the new node
    AnfNodePtr target_node = node->input(IntToSize(pos));
    MS_EXCEPTION_IF_NULL(target_node);
    // create the instance_name
    auto op = (redistribution_oplist_ptr->first)[index];
    std::string op_name = (redistribution_oplist_ptr->first)[index].first;
    std::string instance_name_base = REDISTRIBUTION_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(pre_node, index) + op_name;
    InsertNode(op, node, IntToSize(pos), target_node, func_graph, instance_name);
    if ((redistribution_oplist_ptr->second)[index].first) {
      target_node = node->input(IntToSize(pos));
      MS_EXCEPTION_IF_NULL(target_node);
      (void)InsertMakeTuple(target_node, (redistribution_oplist_ptr->second)[index].second, func_graph);
    }
  }
}

void InsertGetTensorSliceOp(const Operator &op, const CNodePtr &node, const FuncGraphPtr &func_graph, int pos,
                            const std::string &instance_name) {
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: the graph is null, the instance name is " << instance_name;
  }
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if (pos >= SizeToInt(node->inputs().size())) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: pos can't be larger than the size of node's inputs, the instance name is "
                      << instance_name;
  }
  // create the new node
  AnfNodePtr pre_node = node->input(IntToSize(pos));
  MS_EXCEPTION_IF_NULL(pre_node);
  InsertNode(op, node, IntToSize(pos), pre_node, func_graph, instance_name);
}

TensorLayout GetTensorInLayout(const CNodePtr &middle_node, const PrimitivePtr &middle_prim,
                               const OperatorInfoPtr &distribute_operator) {
  TensorInfo tensorinfo_in;
  if (middle_prim->name() == TUPLE_GETITEM) {
    auto value_node = middle_node->input(2)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(value_node);
    size_t index_s = IntToSize(GetValue<int>(value_node->value()));
    if (index_s >= distribute_operator->outputs_tensor_info().size()) {
      MS_LOG(EXCEPTION) << "The index is out of range, index: " << index_s
                        << ", vector size: " << distribute_operator->outputs_tensor_info().size();
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[index_s];
  } else {
    if (distribute_operator->outputs_tensor_info().empty()) {
      MS_LOG(EXCEPTION) << "The outputs tensor info is empty";
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[0];
  }
  return tensorinfo_in.tensor_layout();
}

bool AnfNodeIsPrimitive(const AnfNodePtr &anf_node, const std::string &prim_name) {
  MS_EXCEPTION_IF_NULL(anf_node);
  auto cnode = anf_node->cast<CNodePtr>();
  if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
    return false;
  }
  auto value_node = cnode->input(0)->cast<ValueNodePtr>();
  auto prim = GetValueNode<PrimitivePtr>(value_node);
  MS_EXCEPTION_IF_NULL(prim);
  return (prim->name() == prim_name);
}

std::string GetPrimName(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsValueNode<Primitive>(node->input(0))) {
    MS_LOG(EXCEPTION) << "The node is not a primitive";
  }
  auto value_node = node->input(0)->cast<ValueNodePtr>();
  auto prim = GetValueNode<PrimitivePtr>(value_node);
  MS_EXCEPTION_IF_NULL(prim);
  return prim->name();
}

OperatorInfoPtr GetDistributeOperator(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsParallelCareNode(node)) {
    return nullptr;
  }
  OperatorInfoPtr distribute_operator = node->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Distribute operator is nullptr, the prim is " << GetPrimName(node);
  }
  return distribute_operator;
}
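
// Infer the tensor redistribution between the layout produced by middle_node (under
// distribute_operator) and the layout expected by input `index` of the next node,
// then insert the resulting operator list in front of the next node.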
void Redistribution(const std::pair<AnfNodePtr, int> &node_pair, const OperatorInfoPtr &distribute_operator,
                    const CNodePtr &middle_node, int index, TensorRedistribution tensor_redistribution,
                    const CNodePtr &pre_node) {
  FuncGraphPtr func_graph = middle_node->func_graph();
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "Redistribution: get the graph failed";
  }
  CNodePtr next_node = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(next_node);
  auto middle_value = middle_node->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(middle_value);
  PrimitivePtr middle_prim = middle_value->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(middle_prim);
  OperatorInfoPtr next_distribute_operator = GetDistributeOperator(next_node);
  if (next_distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << next_node->ToString() << " GetDistributeOperator failed";
  }
  RankList dev_list = distribute_operator->global_device_list();
  std::string next_prim_name = GetValueNode<PrimitivePtr>(next_node->input(0))->name();
  MS_LOG(DEBUG) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim " << next_prim_name;
  MS_LOG(DEBUG) << "Redistribution: middle_node " << middle_node->ToString() << " next_node " << next_node->ToString();
  // extract the tensor layouts in and out
  if (distribute_operator->outputs_tensor_info().empty()) {
    MS_LOG(WARNING) << "The pre_node's tensorinfo_in is empty, the operator name is " << distribute_operator->name();
    return;
  }
  if (IntToSize(index - 1) >= next_distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(WARNING) << "The index is out of range, the index is " << index - 1 << ", the vector size is "
                    << next_distribute_operator->inputs_tensor_info().size() << ", the next operator name is "
                    << next_distribute_operator->name();
    return;
  }
  TensorInfo tensorinfo_out = next_distribute_operator->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  TensorLayout tensorlayout_in = GetTensorInLayout(middle_node, middle_prim, distribute_operator);
  if (tensor_redistribution.Init(tensorlayout_in, tensorlayout_out, dev_list) == FAILED) {
    MS_LOG(ERROR) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim: " << next_prim_name;
    MS_LOG(ERROR) << "Redistribution: middle_node " << middle_node->ToString() << " next_node "
                  << next_node->ToString();
    DumpGraph(func_graph, "redistribution_error");
    MS_LOG(EXCEPTION) << "Failure: tensor_redistribution init failed";
  }
  RedistributionOpListPtr redistribution_oplist_ptr = tensor_redistribution.InferTensorRedistributionOperatorList();
  if (redistribution_oplist_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: InferTensorRedistribution failed";
  }
  MS_LOG(DEBUG) << "Redistribution size " << redistribution_oplist_ptr->first.size();
  if (!redistribution_oplist_ptr->first.empty()) {
    // insert the redistribution nodes before the next node
    InsertRedistribution(redistribution_oplist_ptr, next_node, func_graph, node_pair.second, pre_node);
  }
}

bool StrategyFound(std::unordered_map<std::string, ValuePtr> attrs) {
  auto iter = attrs.find(STRATEGY);
  return !((iter == attrs.end()) || (iter->second->type_name() == NONE));
}

bool HasStrategy(const FuncGraphPtr &root) {
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    auto attrs = prim->attrs();
    if (StrategyFound(attrs)) {
      return true;
    }
  }
  return false;
}

bool IsCommunicationOp(const PrimitivePtr &prim) {
  MS_EXCEPTION_IF_NULL(prim);
  return (COMMUNICATION_OPS.find(prim->name()) != COMMUNICATION_OPS.end());
}

bool FindCommunicationOp(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_value_node = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_value_node);
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_value_node);
    MS_EXCEPTION_IF_NULL(prim);
    if (IsCommunicationOp(prim) && cnode->in_forward_flag()) {
      MS_EXCEPTION_IF_NULL(prim_value_node->scope());
      MS_LOG(INFO) << "The graph contains the communication op: " << prim->name() << ", the scope name is "
                   << prim_value_node->scope()->name();
      return true;
    }
  }
  return false;
}

bool IsParallelCareNode(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  ValueNodePtr prim_node = cnode->input(0)->cast<ValueNodePtr>();
  if (prim_node == nullptr) {
    return false;
  }
  PrimitivePtr prim = prim_node->value()->cast<PrimitivePtr>();
  if (prim == nullptr) {
    return false;
  }
  if (IsInBlackList(prim)) {
    MS_LOG(INFO) << "Parallel doesn't care about the node: " << prim->name();
    return false;
  }
  // get_next is not in the forward graph, so we need to mark get_next as a forward node
  if (prim->name() == GET_NEXT) {
    return true;
  }
  if ((prim->name() == CAST) && !cnode->has_user_data<OperatorInfo>()) {
    return false;
  }
  return cnode->in_forward_flag();
}
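
// Recursively walk the users of `node`; for every user that is a parallel-care node
// with an OperatorInfo, trigger Redistribution on that edge. Depend edges other than
// input 1 are skipped, and make_tuple outputs need no redistribution.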
void StepRedistribution(const CNodePtr &node, const OperatorInfoPtr &distribute_operator, const CNodePtr &insert_node,
                        const TensorRedistribution &tensor_redistribution, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node->func_graph());
  FuncGraphManagerPtr manager = node->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  CNodePtr insert_node_new;
  if (AnfNodeIsPrimitive(node, MAKE_TUPLE)) {
    MS_LOG(INFO) << "No need to insert a redistribution op between the make_tuple node and the next node";
    return;
  }
  if (IsValueNode<Primitive>(node->input(0))) {
    auto current_value = node->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(current_value);
    PrimitivePtr current_prim = current_value->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(current_prim);
    insert_node_new = ((current_prim->name() == TUPLE_GETITEM) ? node : insert_node);
  } else {
    insert_node_new = insert_node;
  }
  MS_EXCEPTION_IF_NULL(insert_node_new);
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode);
    if (!IsValueNode<Primitive>(use_cnode->input(0))) {
      StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
    } else {
      ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
      MS_EXCEPTION_IF_NULL(prim_anf_node);
      PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
      MS_EXCEPTION_IF_NULL(node_prim);
      if (node_prim->name() == DEPEND && node_pair.second != 1) {
        continue;
      }
      if (IsParallelCareNode(use_cnode) && use_cnode->has_user_data<OperatorInfo>()) {
        Redistribution(node_pair, distribute_operator, insert_node_new, node_pair.second, tensor_redistribution,
                       pre_node);
      } else {
        StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
      }
    }
  }
}
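
// Split the tensor feeding input `index` of next_node by inserting a _GetTensorSlice
// operator that extracts the local slice described by the input tensor layout;
// shapes of [] or [1] are left untouched.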
void SplitTensor(const AnfNodePtr &node, const CNodePtr &next_node, int index) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(next_node);
  OperatorInfoPtr op_info = next_node->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(op_info);
  // If the shape of the tensor is [] or [1], there is no need to split it.
  Shapes shapes = GetNodeShape(node);
  if (shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Split tensor for " << op_info->name()
                      << ": GetNodeShape for tensor_node, output size is not 1";
  }
  Shape shape = shapes[0];
  std::string shape_str = ShapeToString(shape);
  if (shape.empty() || ((shape.size() == 1) && (shape[0] == 1))) {
    MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape is " << shape_str
                 << ", no need to split it.";
    return;
  }
  MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape of the tensor is " << shape_str;
  // extract the tensor layout
  if (IntToSize(index - 1) >= op_info->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, the index is " << index - 1 << ", the vector size is "
                      << op_info->inputs_tensor_info().size();
  }
  TensorInfo tensor_info = op_info->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensor_layout = tensor_info.tensor_layout();
  // use the _GetTensorSlice operator to split the tensor
  FuncGraphPtr func_graph = next_node->func_graph();  // only a cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(tensor_layout);
  InsertGetTensorSliceOp(op, next_node, func_graph, index, SPLIT_TENSOR);
  if (!op_info->sub_ops().empty()) {
    auto sub_ops = op_info->sub_ops();
    for (size_t i = 0; i < sub_ops.size(); i++) {
      if (!sub_ops.at(i).empty()) {
        InsertGetTensorSliceOp(sub_ops.at(i).at(0), next_node, func_graph, index, SUB);
      }
    }
  }
}

void StepSplitTensor(const AnfNodePtr &node, const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    if (use_cnode == nullptr || !IsValueNode<Primitive>(use_cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr use_cnode_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode_prim);
    if (use_cnode_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_cnode)) {
      SplitTensor(node, use_cnode, node_pair.second);
    }
  }
}

std::vector<AnfNodePtr> ReplaceOpInput(const Operator &replace_op, const std::string &instance_name,
                                       const CNodePtr &node) {
  OperatorArgs arg_replace_op = replace_op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_replace_op.first, replace_op.first, instance_name);
  if (pyop_instance == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << replace_op.first << " CreatOpInstance failed";
  }
  OperatorParams params = arg_replace_op.second;
  if (node->inputs().size() < 2) {
    // the GetNext operator does not have inputs
    if (node->inputs().size() == 1) {
      return {NewValueNode(pyop_instance)};
    }
    MS_LOG(EXCEPTION) << "Failure: " << node->ToString() << " size is smaller than 2";
  }
  std::vector<AnfNodePtr> replace_input = {NewValueNode(pyop_instance), node->input(1)};
  auto prim = GetValueNode<PrimitivePtr>(node->input(0));
  if (prim->name() == EMBEDDING_LOOKUP) {
    replace_input = {NewValueNode(pyop_instance), node->input(1), node->input(2)};
  }
  if (!params.empty()) {
    Param param_first = *(params.begin());
    int32_t first_position = param_first.second;
    if (first_position == 1) {
      replace_input.pop_back();
    }
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      if (val == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: val is nullptr";
      }
      int32_t position = param.second;
      (void)replace_input.insert(replace_input.begin() + position, val);
    }
  }
  return replace_input;
}

void ReplaceOneOp(const Operator &replace_op, const CNodePtr &node) {
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  std::string instance_name = CreateInstanceName(node, 0);
  std::vector<AnfNodePtr> replace_input;
  replace_input = ReplaceOpInput(replace_op, instance_name, node);
  CNodePtr replace_node = func_graph->NewCNode(replace_input);
  MS_EXCEPTION_IF_NULL(replace_node);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  replace_node->set_scope(scope);
  replace_node->set_in_forward_flag(true);
  replace_input[0]->set_scope(scope);
  (void)manager->Replace(node, replace_node);
}
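
// Replace `node` with the operator list generated by its OperatorInfo: intermediate ops
// are chained onto the original node, the last op takes over the node's inputs and its
// OperatorInfo, and EmbeddingLookup is additionally pinned to the CPU target.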
void StepReplaceOp(OperatorVector replace_op, const CNodePtr &node) {
  // step 1: get the graph manager and the distribute_operator
  OperatorInfoPtr distribute_operator = node->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since distribute_operator is nullptr";
  }
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  // step 2: traverse the op list and insert the nodes
  std::reverse(replace_op.begin(), replace_op.end());
  auto replace_op_info = distribute_operator->replace_op_info();
  std::reverse(replace_op_info.begin(), replace_op_info.end());
  if (!replace_op_info.empty() && replace_op_info.size() != replace_op.size()) {
    MS_LOG(EXCEPTION) << "replace_op_info is not empty but its size is not equal to replace_op's!";
  }
  bool replace_op_info_flag = !replace_op_info.empty();
  for (size_t index = 0; index < replace_op.size(); ++index) {
    std::string instance_name = CreateInstanceName(node, index);
    std::vector<AnfNodePtr> replace_input;
    if (index != replace_op.size() - 1) {
      replace_input = CreateInput(replace_op[index], node, instance_name);
    } else {
      replace_input = ReplaceOpInput(replace_op[index], instance_name, node);
    }
    CNodePtr replace_node = func_graph->NewCNode(replace_input);
    MS_EXCEPTION_IF_NULL(replace_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    replace_node->set_scope(scope);
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(replace_node->input(0));
    if (prim->name() == EMBEDDING_LOOKUP) {
      auto attrs = prim->attrs();
      attrs[TARGET] = MakeValue(CPU);
      (void)prim->SetAttrs(attrs);
    }
    if (index == replace_op.size() - 1) {
      replace_node->set_user_data<OperatorInfo>(node->user_data<OperatorInfo>());
    }
    replace_node->set_in_forward_flag(true);
    replace_input[0]->set_scope(scope);
    if (replace_op_info_flag && replace_op_info[index].first) {
      auto new_cnode = InsertMakeTuple(replace_node, replace_op_info[index].second, func_graph);
      (void)manager->Replace(node, new_cnode);  // use Replace to insert the node
    } else {
      (void)manager->Replace(node, replace_node);  // use Replace to insert the node
    }
  }
  MS_LOG(INFO) << "Insert ReplaceOp success for " << distribute_operator->name();
}

bool IsSomePrimitive(const CNodePtr &cnode, const std::string &name) {
  ValueNodePtr anf_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(anf_node);
  PrimitivePtr prim = anf_node->value()->cast<PrimitivePtr>();
  return (prim->name() == name);
}

void StepReplaceGraph(const ReplaceGraphPtr &replace_graph, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(replace_graph);
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(replace_graph->second);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  for (auto &replace_input : replace_graph->first) {
    auto pre_node = node->input(IntToSize(replace_input.second));
    manager->SetEdge(replace_input.first, 1, pre_node);
  }
  // "(void)manager->Replace(replace_graph->first, pre_node);" cannot be called here
  auto replace_output = replace_graph->second;
  MS_EXCEPTION_IF_NULL(replace_output);
  (void)manager->Replace(node, replace_output);
}

int32_t GetTupleGetItemIndex(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  if (cnode->inputs().size() != 3) {
    MS_LOG(EXCEPTION) << cnode->ToString() << " size( " << cnode->inputs().size() << " ) is not 3";
  }
  if (!cnode->input(2)->isa<ValueNode>()) {
    MS_LOG(EXCEPTION) << "The index of tuple_getitem is not a value node";
  }
  ValuePtr tuple_index_value = GetValueNode(cnode->input(2));
  MS_EXCEPTION_IF_NULL(tuple_index_value);
  if (!tuple_index_value->isa<Int32Imm>()) {
    MS_LOG(EXCEPTION) << "The index of tuple_getitem is not int32";
  }
  return tuple_index_value->cast<Int32ImmPtr>()->value();
}

// Judge whether the node is a loss node; if the loss has multiple outputs,
// determine which output the gradient flows through from the tuple_getitem index.
// A tuple sens is currently not supported.
LossNodeInfo GetLossNodeInfo(const AnfNodePtr &loss_node) {
  MS_EXCEPTION_IF_NULL(loss_node);
  FuncGraphPtr sub_graph = loss_node->func_graph();
  MS_EXCEPTION_IF_NULL(sub_graph);
  CNodePtr return_node = sub_graph->get_return();
  MS_EXCEPTION_IF_NULL(return_node);
  if (return_node->inputs().size() < 2) {
    MS_LOG(EXCEPTION) << "Failure: " << return_node->ToString() << " size is smaller than 2";
  }
  AnfNodePtr pre_node = return_node->input(1);
  MS_EXCEPTION_IF_NULL(pre_node);
  LossNodeInfo node_info;
  // return -> cast
  auto pre_cnode = pre_node->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(pre_cnode);
  auto pre_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  if (pre_prim->name() == CAST && !pre_cnode->has_user_data<OperatorInfo>()) {
    pre_node = pre_cnode->input(1);
  }
  // return -> loss
  if (pre_node == loss_node) {
    node_info.has_tuple_getitem = false;
    node_info.dout_index = 0;
    return node_info;
  }
  // return -> tuple_getitem -> loss
  auto cnode = pre_node->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  auto current_value = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(current_value);
  PrimitivePtr current_prim = current_value->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(current_prim);
  // the size of a common cnode is larger than 1
  if (cnode->inputs().size() < 2) {
    MS_LOG(EXCEPTION) << cnode->ToString() << " size( " << cnode->inputs().size() << " ) is smaller than 2";
  }
  if ((current_prim->name() == TUPLE_GETITEM) && (cnode->input(1) == loss_node)) {
    // the size of a tuple_getitem cnode is 3
    auto tuple_index = GetTupleGetItemIndex(cnode);
    node_info.has_tuple_getitem = true;
    node_info.dout_index = tuple_index;
    return node_info;
  }
  MS_LOG(EXCEPTION) << "Invalid loss";
}
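
// Insert the virtual div operators in front of every tensor input (CNode or Parameter)
// of the node; non-tensor inputs are skipped.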
void InsertVirtualDivOp(const VirtualDivOp &virtual_div_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  for (size_t index = 1; index < node_size; ++index) {
    AnfNodePtr input = node->input(index);
    MS_EXCEPTION_IF_NULL(input);
    if (!input->isa<CNode>() && !input->isa<Parameter>()) {  // if it is not a tensor, continue
      MS_LOG(INFO) << "insert div op: the input at index " << index << " is not a tensor, skip";
      continue;
    }
    for (size_t pos = 0; pos < virtual_div_op.size(); ++pos) {
      std::string instance_name = CreateInstanceName(node, pos);
      InsertNode(virtual_div_op[pos], node, index, node->input(index), func_graph, instance_name);
    }
    MS_LOG(INFO) << "insert div op for input index " << index << " of the node";
  }
}
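
// Trace backwards from `node` to the parameter (or RefKey) that feeds it, if any.
// The returned flag is true for a RefKey; the search stops at parallel-care nodes
// and ignores the non-data inputs of Depend.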
std::pair<AnfNodePtr, bool> FindParameter(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  if (!node->isa<Parameter>() && !node->isa<CNode>() && !node->isa<ValueNode>()) {
    return std::make_pair(nullptr, false);
  } else if (node->isa<Parameter>()) {
    return std::make_pair(node, false);
  } else if (node->isa<ValueNode>()) {
    if (IsValueNode<RefKey>(node)) {
      std::vector<AnfNodePtr> param_v = FindParameterByRefKeyNode(node, func_graph);
      if (param_v.size() != 1) {
        MS_LOG(EXCEPTION) << "FindParameterByRefKeyNode failed, the returned vector size must be 1, real is "
                          << param_v.size();
      }
      return std::make_pair(node, true);
    }
    return std::make_pair(nullptr, false);
  } else {
    CNodePtr cnode = node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      for (size_t index = 0; index < cnode->inputs().size(); ++index) {
        if (!FindParameter(cnode->input(index), func_graph).first) {
          continue;
        }
        return FindParameter(cnode->input(index), func_graph);
      }
    } else {
      if (IsParallelCareNode(cnode)) {
        return std::make_pair(nullptr, false);
      } else {
        ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
        MS_EXCEPTION_IF_NULL(prim_anf_node);
        for (size_t index = 0; index < cnode->inputs().size(); ++index) {
          PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
          MS_EXCEPTION_IF_NULL(prim);
          if (prim->name() == DEPEND && index != 1) {
            continue;
          }
          if (!FindParameter(cnode->input(index), func_graph).first) {
            continue;
          }
          return FindParameter(cnode->input(index), func_graph);
        }
      }
    }
  }
  return std::make_pair(nullptr, false);
}

std::pair<bool, CNodePtr> FindCNode(const AnfNodePtr &anode, const std::string &name, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(anode);
  MS_EXCEPTION_IF_NULL(anode->func_graph());
  FuncGraphManagerPtr manager = anode->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[anode];
  bool result = false;
  CNodePtr cnode_return = nullptr;
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if (node_prim->name() == name && node_pair.second == 1) {
      if (use_apply->func_graph() == func_graph) {
        result = true;
        cnode_return = use_apply;
        MS_LOG(INFO) << "Find the Primitive " << name << " in the same func_graph";
        continue;
      }
      MS_LOG(INFO) << "Find the Primitive " << name << " in a different func_graph";
    }
  }
  return std::make_pair(result, cnode_return);
}

bool IsCastBeforMirror(const CNodePtr &node, size_t index) {
  // return true only if cast_before_mirror is enabled, the previous node is a Cast, and its type is not float32
  if (!ParallelContext::GetInstance()->cast_before_mirror()) {
    return false;
  }
  auto pre_node = node->input(index);
  MS_EXCEPTION_IF_NULL(pre_node);
  auto cnode = pre_node->cast<CNodePtr>();
  if (cnode == nullptr || !IsValueNode<Primitive>(cnode->input(0))) {
    return false;
  }
  auto pre_value_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(pre_value_node);
  auto pre_prim = pre_value_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(pre_prim);
  if (pre_prim->name() != CAST) {
    return false;
  }
  auto node_type = pre_node->Type();
  MS_EXCEPTION_IF_NULL(node_type);
  if (!node_type->isa<mindspore::TensorType>()) {
    MS_LOG(EXCEPTION) << "Unknown type.";
  }
  auto input_element_type = node_type->cast<mindspore::TensorTypePtr>()->element();
  MS_EXCEPTION_IF_NULL(input_element_type);
  auto type_id = input_element_type->type_id();
  return (type_id != kNumberTypeFloat32);
}
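
// Insert a mirror operator for every input of `node` that traces back to a parameter.
// A mirror that already exists for the same parameter in this graph is reused; with
// cast_before_mirror enabled, the mirror is placed before the Cast instead.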
void InsertMirrorOps(const MirrorOps &mirror_ops, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if ((node->inputs().size() == 2) && AnfNodeIsPrimitive(node->input(1), MAKE_TUPLE)) {
    MS_LOG(INFO) << "The mirror for " << GetPrimName(node) << " has been handled by the make_tuple node";
    return;
  }
  if (mirror_ops.size() != node_size - 1) {
    MS_LOG(EXCEPTION) << "The size of mirror_ops is wrong! mirror_ops size is " << mirror_ops.size()
                      << ", node_size is " << node_size - 1;
  }
  for (size_t index = 1; index < node_size; ++index) {
    OperatorVector backward_op = mirror_ops[index - 1];
    if (backward_op.empty()) {
      continue;
    }
    std::pair<AnfNodePtr, bool> param_node_pair = FindParameter(node->input(index), func_graph);
    if (!param_node_pair.first) {
      continue;
    }
    // not a RefKey
    if (!param_node_pair.second) {
      auto next_cnode = FindCNode(param_node_pair.first, MIRROR_OPERATOR, func_graph);
      // if there is already a MirrorOp in the same graph, use the MirrorOp CNode as an input instead
      if (next_cnode.first) {
        MS_EXCEPTION_IF_NULL(next_cnode.second);
        manager->SetEdge(node, SizeToInt(index), next_cnode.second);
        continue;
      }
    }
    // if the parameter found is a RefKey, or no MirrorOp is found in the same graph, insert a new MirrorOp
    // there is only one MirrorOp in backward_op
    if (backward_op.size() != 1) {
      MS_LOG(EXCEPTION) << "backward_op size must be 1, real is " << backward_op.size();
    }
    std::string instance_name = MIRROR_OP;
    if (IsCastBeforMirror(node, index)) {
      for (auto &op : backward_op) {
        // insert the new node before the Cast node
        CNodePtr cnode = node->input(index)->cast<CNodePtr>();
        MS_EXCEPTION_IF_NULL(cnode);
        AnfNodePtr pre_node = cnode->input(1);
        InsertNode(op, cnode, size_t(1), pre_node, func_graph, instance_name);
      }
    } else {
      for (auto &op : backward_op) {
        AnfNodePtr pre_node = node->input(index);
        InsertNode(op, node, index, pre_node, func_graph, instance_name);
      }
    }
  }
}
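
// Insert the backward communication for `node`: mirror operators for its parameter
// inputs and, when the node is a loss node, the virtual div operators.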
void BackwardCommunication(const OperatorInfoPtr &distribute_operator, const CNodePtr &node,
                           const std::vector<std::pair<CNodePtr, CNodePtr>> &sens_loss_pairs) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(node);
  bool is_loss_cnode =
    std::any_of(sens_loss_pairs.begin(), sens_loss_pairs.end(),
                [node](const std::pair<CNodePtr, CNodePtr> &element) { return element.second == node; });
  MirrorOps mirror_ops = distribute_operator->mirror_ops();
  VirtualDivOp virtual_div_op = distribute_operator->virtual_div_op();
  // insert mirror op
  if (!mirror_ops.empty()) {
    MS_LOG(INFO) << "insert mirror op for " << distribute_operator->name();
    InsertMirrorOps(mirror_ops, node);
  }
  // insert virtual div op
  if (!virtual_div_op.empty() && is_loss_cnode) {
    MS_LOG(INFO) << "insert virtual div op for " << distribute_operator->name();
    InsertVirtualDivOp(virtual_div_op, node);
  }
}

std::string GetDisOpName(const std::string &prim_name) {
  std::string op_name = prim_name;
  if (!prim_name.empty() && (prim_name[0] == '_')) {
    op_name = prim_name.substr(1);
  }
  return op_name + "Info";
}
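
// Create the distributed OperatorInfo registered under the name derived from the
// primitive (usually "<Prim>Info"); GatherV2 maps to "GatherV2PInfo" unless its
// data_parallel flag is set. The TOTAL_OPS counter keeps operator names unique.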
OperatorInfoPtr OperatorInstanceByName(const std::string &name, const PrimitiveAttrs &attrs,
                                       const std::vector<Shapes> &shape_list) {
  if (shape_list.size() != 2) {
    MS_LOG(ERROR) << "The size of the shape list is not 2";
    return nullptr;
  }
  if (name.length() == 0) {
    MS_LOG(EXCEPTION) << "The length of the name is zero!";
  }
  std::string distribute_opname = GetDisOpName(name);
  if (name == GATHERV2) {
    distribute_opname = name + "PInfo";
    auto data_parallel_iter = attrs.find(DATA_PARALLEL);
    if (data_parallel_iter != attrs.end()) {
      MS_EXCEPTION_IF_NULL(data_parallel_iter->second);
      if (!data_parallel_iter->second->isa<BoolImm>()) {
        MS_LOG(EXCEPTION) << "The data_parallel flag's type is not a bool.";
      }
      bool data_parallel = data_parallel_iter->second->cast<BoolImmPtr>()->value();
      if (data_parallel) {
        distribute_opname = name + "Info";
      }
    }
  }
  OperatorInfoPtr operator_ =
    (OperatorInfoPtr)DynCreator::Instance().Creat(distribute_opname, shape_list[0], shape_list[1], attrs, TOTAL_OPS);
  if (operator_ == nullptr) {
    MS_LOG(INFO) << "Create " << name << " failed";
    return nullptr;
  }
  std::string origin_name = operator_->name();
  operator_->set_name(origin_name + std::to_string(TOTAL_OPS));
  MS_LOG(INFO) << "Successfully created operator " << origin_name;
  ++TOTAL_OPS;
  return operator_;
}

OperatorInfoPtr OperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
                                 const std::vector<Shapes> &shape_list) {
  MS_EXCEPTION_IF_NULL(prim);
  OperatorInfoPtr operator_ = OperatorInstanceByName(prim->name(), attrs, shape_list);
  if ((operator_ == nullptr) && (prim->name() != MAKE_TUPLE)) {
    MS_LOG(INFO) << "Create " << prim->name() << " failed, use batch parallel";
    operator_ = OperatorInstanceByName(BATCH_PARALLEL, attrs, shape_list);
    MS_EXCEPTION_IF_NULL(operator_);
  }
  return operator_;
}

OperatorInfoPtr NewOperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
                                    std::vector<Shapes> shape_list) {
  OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
  for (size_t i = 0; i < shape_list[0].size(); ++i) {
    MS_LOG(INFO) << "No: " << i << " input's shape: " << ShapeToString(shape_list[0][i]);
  }
  return operator_;
}
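
// Parse the 'strategy' attribute, a tuple of tuples of ints, into a Strategys object
// (one Dimensions vector per input) and wrap it in a StrategyPtr via NewStrategy(0, strategy).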
StrategyPtr ExtractStrategy(std::unordered_map<std::string, ValuePtr> attrs) {
  ValueTuplePtr var = attrs[STRATEGY]->cast<ValueTuplePtr>();
  StrategyPtr strategyPtr;
  MS_LOG(INFO) << "Extract information: strategy " << attrs[STRATEGY]->ToString();
  if (var == nullptr) {
    MS_LOG(EXCEPTION) << "Strategy value is nullptr";
  }
  if (var->size() > 0) {
    std::vector<ValuePtr> elements = var->value();
    Strategys strategy;
    for (uint32_t index = 0; index < elements.size(); ++index) {
      Dimensions dim;
      if (elements[index]->isa<ValueSequeue>()) {
        ValueTuplePtr value_tuple = elements[index]->cast<ValueTuplePtr>();
        std::vector<ValuePtr> value_vector = value_tuple->value();
        (void)std::transform(
          value_vector.begin(), value_vector.end(), std::back_inserter(dim), [](const ValuePtr &value) {
            return value->isa<Int64Imm>() ? GetValue<int64_t>(value) : static_cast<int64_t>(GetValue<int>(value));
          });
        strategy.push_back(dim);
      } else {
        MS_LOG(EXCEPTION) << "Failure: Strategy's format is wrong! Need ValueSequeue";
      }
    }
    if (strategy.empty()) {
      MS_LOG(EXCEPTION) << "ExtractStrategy: failed to extract strategy";
    }
    strategyPtr = NewStrategy(0, strategy);
  }
  return strategyPtr;
}
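
// Return the shape(s) of `node` as a vector of int64 shapes. A tuple shape yields one
// entry per element; for a MakeRef node the shape of the referenced parameter is
// returned instead.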
Shapes GetNodeShape(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  Shapes shapes;
  BaseShapePtr base_shape_ptr = node->Shape();
  if (node->isa<CNode>()) {
    auto cnode = node->cast<CNodePtr>();
    if (IsValueNode<Primitive>(cnode->input(0))) {
      PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
      MS_EXCEPTION_IF_NULL(prim);
      if (prim->name() == MAKEREF) {
        AnfNodePtr ref_node = cnode->input(1);
        auto func_graph = cnode->func_graph();
        MS_EXCEPTION_IF_NULL(ref_node);
        MS_EXCEPTION_IF_NULL(func_graph);
        return GetRefKeyNodeShape(ref_node, func_graph);
      }
    }
    if (cnode->input(0)->isa<CNode>()) {
      if (cnode->inputs().size() < 2) {
        MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " size is smaller than 2";
      }
      base_shape_ptr = cnode->input(1)->Shape();
    }
  }
  if (base_shape_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " shape_ptr is nullptr, full name is "
                      << node->fullname_with_scope();
  }
  auto tuple_shape_ptr = dyn_cast<abstract::TupleShape>(base_shape_ptr);
  if (tuple_shape_ptr != nullptr) {
    auto tuple_shape = tuple_shape_ptr->shape();
    for (auto &shape : tuple_shape) {
      auto each_shape = dyn_cast<abstract::Shape>(shape);
      MS_EXCEPTION_IF_NULL(each_shape);
      std::vector<int> shape_int = each_shape->shape();
      Shape new_shape;
      (void)std::transform(shape_int.begin(), shape_int.end(), std::back_inserter(new_shape),
                           [](const int &value) { return static_cast<int64_t>(value); });
      shapes.push_back(new_shape);
    }
  } else {
    auto shape_ptr = dyn_cast<abstract::Shape>(base_shape_ptr);
    MS_EXCEPTION_IF_NULL(shape_ptr);
    std::vector<int> shape_int = shape_ptr->shape();
    Shape new_shape;
    (void)std::transform(shape_int.begin(), shape_int.end(), std::back_inserter(new_shape),
                         [](const int &value) { return static_cast<int64_t>(value); });
    shapes.push_back(new_shape);
  }
  return shapes;
}
std::vector<AnfNodePtr> FindParameterByRefKeyNode(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> parameters;
  if (!IsValueNode<RefKey>(node)) {
    MS_LOG(ERROR) << "The node is not a ref key";
    return parameters;
  }
  auto ref_key = GetValueNode<RefKeyPtr>(node);
  MS_EXCEPTION_IF_NULL(ref_key);
  auto name = ref_key->tag();
  auto manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto roots = manager->roots();
  if (roots.size() != 1) {
    MS_LOG(ERROR) << "The size of roots ( " << roots.size() << " ) is not 1";
    return parameters;
  }
  FuncGraphPtr root_g = roots.back();
  MS_EXCEPTION_IF_NULL(root_g);
  for (auto &param_node : root_g->parameters()) {
    auto param = param_node->cast<ParameterPtr>();
    if (param && (name == param->name())) {
      parameters.push_back(param_node);
      MS_LOG(INFO) << "The name of ref key is: " << name;
      return parameters;
    }
  }
  MS_LOG(ERROR) << "The name of ref key is: " << name << ", but the corresponding parameter was not found";
  return parameters;
}
Shapes GetRefKeyNodeShape(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(node, func_graph);
  if (parameters.size() != 1) {
    MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
  }
  Shapes input_shapes;
  input_shapes = GetNodeShape(parameters[0]);
  if (input_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Get input shape failed";
  }
  MS_LOG(INFO) << "The parameter shape is " << ShapeToString(input_shapes[0]);
  return input_shapes;
}
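// Collect the input and output shapes of a cnode as a two-element vector:
// shape_all[0] holds one Shape per real input, shape_all[1] holds the output
// shapes. RefKey inputs are resolved to their parameter and remembered in
// g_RefMap so the parameter's slice shape can be set later in CoverSliceShape.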
std::vector<Shapes> ExtractShape(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  Shapes shape_inputs, shape_outputs;
  std::vector<Shapes> shape_all;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  std::vector<AnfNodePtr> node_inputs{all_inputs.begin() + 1, all_inputs.end()};
  size_t inputs_size = all_inputs.size();
  for (size_t i = 1; i < inputs_size; ++i) {
    Shapes input_shapes;
    AnfNodePtr input = all_inputs[i];
    if (IsValueNode<RefKey>(input)) {
      auto func_graph = node->func_graph();
      MS_EXCEPTION_IF_NULL(func_graph);
      std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(input, func_graph);
      if (parameters.size() != 1) {
        MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
      }
      std::pair<AnfNodePtr, int> node_pair = std::make_pair(node, SizeToInt(i));
      g_RefMap[parameters[0]] = node_pair;
      input_shapes = GetRefKeyNodeShape(input, func_graph);
    } else if (IsValueNode<Tensor>(input) || input->isa<CNode>() || input->isa<Parameter>()) {
      input_shapes = GetNodeShape(input);
    } else {
      continue;
    }
    if (input_shapes.size() != 1) {
      if (inputs_size == 2) {  // like concat
        shape_inputs = input_shapes;
        break;
      } else {
        MS_LOG(EXCEPTION) << "ExtractShape: Get input shape failed";
      }
    }
    shape_inputs.push_back(input_shapes[0]);
  }
  shape_all.push_back(shape_inputs);
  // extract the output shape
  shape_outputs = GetNodeShape(node);
  shape_all.push_back(shape_outputs);
  return shape_all;
}
std::pair<AnfNodePtr, int> FindParallelCareNode(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_node_anf = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_node_anf);
    PrimitivePtr node_prim = prim_node_anf->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if (node_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
      return node_pair;
    }
    // search transitively through the users; compute the recursion only once
    auto next_node_pair = FindParallelCareNode(node_pair.first);
    if (next_node_pair.first != nullptr) {
      return next_node_pair;
    }
  }
  return std::make_pair(nullptr, 0);
}
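// Starting from a parameter, locate the first parallel-care user, descending
// into sub-graphs when the parameter is only consumed after being passed to a
// called func_graph.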
std::pair<AnfNodePtr, int> FindSubGraph(const FuncGraphPtr &graph, const AnfNodePtr &parameter) {
  MS_EXCEPTION_IF_NULL(graph);
  MS_EXCEPTION_IF_NULL(parameter);
  FuncGraphManagerPtr manager = graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  std::pair<AnfNodePtr, int> prim_anf_node_pair = FindParallelCareNode(parameter);
  if (prim_anf_node_pair.first != nullptr) {
    return prim_anf_node_pair;
  } else {
    AnfNodeIndexSet param_sub_set = manager->node_users()[parameter];
    for (auto &param_pair : param_sub_set) {
      CNodePtr graph_cnode = param_pair.first->cast<CNodePtr>();
      if ((graph_cnode == nullptr) || !graph_cnode->input(0)->isa<CNode>()) {
        continue;
      }
      CNodePtr graph_cnode_inp0 = graph_cnode->input(0)->cast<CNodePtr>();
      if (!IsValueNode<FuncGraph>(graph_cnode_inp0->input(1))) {
        continue;
      }
      FuncGraphPtr graph_sub = GetValueNode<FuncGraphPtr>(graph_cnode_inp0->input(1));
      auto parameters = graph_sub->parameters();
      if (IntToSize(param_pair.second - 1) >= parameters.size()) {
        MS_LOG(EXCEPTION) << "The index is out of range, index is " << param_pair.second - 1 << ", vector size is "
                          << parameters.size();
      }
      std::pair<AnfNodePtr, int> res = FindSubGraph(graph_sub, parameters[IntToSize(param_pair.second - 1)]);
      if (res.first != nullptr) {
        return res;
      }
    }
  }
  return std::make_pair(nullptr, 0);
}
void SetParallelShape(const AnfNodePtr &parameter, const std::pair<AnfNodePtr, int> &res) {
  MS_EXCEPTION_IF_NULL(parameter);
  AbstractBasePtr abstract = parameter->abstract();
  MS_EXCEPTION_IF_NULL(abstract);
  MS_LOG(DEBUG) << "SetParallelShape " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
  CNodePtr cnode = res.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = cnode->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: node " << cnode->ToString() << "'s OperatorInfoPtr is nullptr";
  }
  if (IntToSize(res.second - 1) >= distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << res.second - 1 << ", vector size is "
                      << distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[IntToSize(res.second - 1)];
  Shape slice_shape = tensorinfo_in.slice_shape();
  MS_LOG(INFO) << "SetParallelShape slice_shape " << parameter->ToString() << " shape "
               << MakeValue(slice_shape)->ToString() << ", op name is " << distribute_operator->name();
  std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
  MS_EXCEPTION_IF_NULL(parallel_shape);
  // Don't modify it in place: the pointer of this AbstractValue may be used as a cache key in StaticAnalysis.
  auto cloned_abstract = abstract->Clone();
  MS_EXCEPTION_IF_NULL(cloned_abstract);
  cloned_abstract->set_shape(parallel_shape);
  parameter->set_abstract(cloned_abstract);
  TensorLayout tensor_layout = tensorinfo_in.tensor_layout();
  ParameterPtr parameter_ptr = parameter->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(parameter_ptr);
  parameter_ptr->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(tensor_layout));
}
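// Rewrite every root parameter's abstract shape to its parallel slice shape,
// using the ref-key mapping collected during shape extraction when available.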
void CoverSliceShape(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto parameters = root->parameters();
  for (auto &parameter : parameters) {
    MS_EXCEPTION_IF_NULL(parameter->Shape());
    auto iter = g_RefMap.find(parameter);
    if (iter != g_RefMap.end()) {
      SetParallelShape(parameter, g_RefMap[parameter]);
      continue;
    }
    std::pair<AnfNodePtr, int> res = FindSubGraph(root, parameter);
    if (res.first == nullptr) {
      MS_LOG(INFO) << "Parameter " << parameter->ToString() << " doesn't need to set a parallel shape";
    } else {
      SetParallelShape(parameter, res);
      MS_LOG(DEBUG) << "Parameter " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
    }
  }
  g_RefMap.clear();
}
bool ParameterIsCloned(const AnfNodePtr &parameter_node) {
  MS_EXCEPTION_IF_NULL(parameter_node);
  auto cloned_parameter = parameter_node->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(cloned_parameter);
  // find the cloned parameter
  if (!cloned_parameter->has_default()) {
    return false;
  }
  auto obj = py::cast(cloned_parameter->default_param());
  auto param_value = py::cast<ParamValuePtr>(obj.attr("_value"));
  if (param_value == nullptr) {
    return false;
  }
  bool cloned = param_value->cloned();
  if (!cloned) {
    return false;
  }
  MS_LOG(INFO) << "The parameter: " << cloned_parameter->name() << " is cloned";
  return true;
}
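// For every cloned parameter (e.g. an optimizer's state tensors cloned from a
// weight, as the function name suggests), copy the tensor layout and sliced
// shape from the parameter it was cloned from, matched via the
// cloned_index / be_cloned_index bookkeeping.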
void SetClonedTensorShapeForOptimizer(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &cloned_parameter_node : root->parameters()) {
    MS_EXCEPTION_IF_NULL(cloned_parameter_node);
    auto cloned_parameter = cloned_parameter_node->cast<ParameterPtr>();
    MS_EXCEPTION_IF_NULL(cloned_parameter);
    if (!ParameterIsCloned(cloned_parameter_node)) {
      continue;
    }
    auto obj = py::cast(cloned_parameter->default_param());
    auto param_value = py::cast<ParamValuePtr>(obj.attr("_value"));
    if (param_value == nullptr) {
      continue;
    }
    // get the cloned index
    int32_t cloned_index = param_value->cloned_index();
    // find the parameter this one was cloned from
    bool found_be_cloned_parameter = false;
    ParameterPtr cloned_from_parameter = nullptr;
    AnfNodePtr cloned_from_node = nullptr;
    for (auto &be_cloned_parameter_node : root->parameters()) {
      MS_EXCEPTION_IF_NULL(be_cloned_parameter_node);
      auto be_cloned_parameter = be_cloned_parameter_node->cast<ParameterPtr>();
      MS_EXCEPTION_IF_NULL(be_cloned_parameter);
      if (!be_cloned_parameter->has_default()) {
        continue;
      }
      const auto &param_value_cloned = be_cloned_parameter->default_param();
      auto obj_in = py::cast(param_value_cloned);
      auto param_value_in = py::cast<ParamValuePtr>(obj_in.attr("_value"));
      if (param_value_in == nullptr) {
        continue;
      }
      if (!param_value_in->be_cloned()) {
        continue;
      }
      // get the indices under which this candidate parameter was cloned
      auto &be_cloned_index = param_value_in->be_cloned_index();
      if (std::find(be_cloned_index.begin(), be_cloned_index.end(), cloned_index) != be_cloned_index.end()) {
        found_be_cloned_parameter = true;
        cloned_from_parameter = be_cloned_parameter;
        cloned_from_node = be_cloned_parameter_node;
      }
    }
    if (found_be_cloned_parameter) {
      // set the shape and tensor layout for the cloned parameter
      cloned_parameter->set_user_data<TensorLayout>(cloned_from_parameter->user_data<TensorLayout>());
      MS_EXCEPTION_IF_NULL(cloned_parameter_node->abstract());
      MS_EXCEPTION_IF_NULL(cloned_from_node->abstract());
      auto cloned_abstract = cloned_parameter_node->abstract()->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(cloned_from_node->abstract()->GetShapeTrack());
      cloned_parameter_node->set_abstract(cloned_abstract);
      MS_LOG(INFO) << "The parameter: " << cloned_parameter->name()
                   << " is cloned from parameter: " << cloned_from_parameter->name()
                   << ", clone index is: " << cloned_index;
    } else {
      MS_LOG(EXCEPTION) << "The parameter: " << cloned_parameter->name() << " is cloned, cloned index is "
                        << cloned_index << ", but the parameter it was cloned from was not found";
    }
  }
}
void SetVirtualDatasetStrategy(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  bool full_batch = ParallelContext::GetInstance()->full_batch();
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(node->input(0));
  MS_EXCEPTION_IF_NULL(prim);
  if (prim->name() == VIRTUAL_DATA_SET) {
    CheckGlobalDeviceManager();
    int32_t dev_num;
    if (full_batch) {
      dev_num = 1;
    } else {
      dev_num = SizeToInt(g_device_manager->GetDeviceListByStageId(0).size());
    }
    auto attrs_temp = prim->attrs();
    std::vector<Shapes> shape_list = ExtractShape(node);
    if (shape_list.empty()) {
      MS_LOG(EXCEPTION) << "Failure: node " << node->ToString() << " failed to extract shape";
    }
    std::vector<ValuePtr> elements;
    for (size_t i = 0; i < shape_list[0].size(); i++) {
      if (shape_list[0][i].empty()) {
        MS_LOG(EXCEPTION) << "shape_list[ " << i << " ].size() is zero";
      }
      // split only the first (batch) dimension across devices
      Dimensions input_strategy = {dev_num};
      for (size_t j = 1; j < shape_list[0][i].size(); j++) {
        input_strategy.push_back(1);
      }
      elements.push_back(MakeValue(input_strategy));
    }
    ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
    attrs_temp[STRATEGY] = strategy;
    (void)prim->SetAttrs(attrs_temp);
  }
}
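// For every parallel-care cnode: extract input/output shapes, instantiate its
// OperatorInfo, then resolve a strategy in priority order: checkpoint entry,
// user-specified attribute, or a generated batch-parallel strategy. Reshape is
// only registered here and is initialized later in ReshapeInit.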
void ExtractInformation(const std::vector<AnfNodePtr> &all_nodes) {
  // load the strategy map from the checkpoint
  StrategyMap stra_map;
  if (StrategyCheckpoint::GetInstance().LoadCheckPointOn()) {
    if (StrategyCheckpoint::GetInstance().Load(&stra_map) != SUCCESS) {
      MS_LOG(EXCEPTION) << "Load strategy checkpoint failed";
    }
  }
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    SetVirtualDatasetStrategy(cnode);
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    if (prim->name() == MAKE_TUPLE) {
      continue;
    }
    auto attrs = prim->attrs();
    MS_LOG(INFO) << "Extract information: node: " << node->ToString() << " prim " << prim->name();
    if (IsParallelCareNode(cnode)) {
      std::vector<Shapes> shape_list = ExtractShape(cnode);
      if (shape_list.empty()) {
        MS_LOG(EXCEPTION) << "Failure: node " << node->ToString() << " failed to extract shape";
      }
      OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
      if (operator_ == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: Primitive " << prim->name() << " OperatorInstance failed";
      }
      auto &inputs = cnode->inputs();
      std::vector<ValuePtr> input_value;
      for (size_t index = 1; index < inputs.size(); ++index) {
        if (inputs[index]->isa<ValueNode>()) {
          input_value.push_back(GetValueNode(inputs[index]));
        } else {
          input_value.emplace_back(nullptr);
        }
      }
      StrategyPtr strategyPtr = nullptr;
      (*operator_).set_input_value(input_value);
      (*operator_).set_outputs_dtype(cnode->Type());
      (*operator_).set_cnode(cnode);
      if (prim->name() == RESHAPE) {
        cnode->set_user_data<OperatorInfo>(operator_);
        continue;
      }
      // load the strategy checkpoint
      // key of the strategy map
      std::string strategy_key_name = "";
      auto param_names = NodeParameterName(cnode);
      if (!param_names.empty()) {
        strategy_key_name = param_names[0].first;
      }
      bool load_strategy_from_ckpt =
        StrategyCheckpoint::GetInstance().LoadCheckPointOn() && stra_map.find(strategy_key_name) != stra_map.end();
      if (!StrategyFound(attrs) && !load_strategy_from_ckpt) {
        MS_LOG(INFO) << "ExtractInformation: the strategy of node " << node->ToString() << " prim " << prim->name()
                     << " is empty, using batch parallel";
        std::shared_ptr<Strategys> strategy_v_ptr = operator_->GenerateBatchStrategies();
        if (strategy_v_ptr == nullptr) {
          MS_LOG(EXCEPTION) << "Failure: Generate batch parallel strategy failed";
        }
        std::vector<ValuePtr> elements;
        for (size_t i = 0; i < strategy_v_ptr->size(); i++) {
          elements.push_back(MakeValue((*strategy_v_ptr)[i]));
        }
        ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
        // display the strategy generated by batch parallel
        attrs[GEN_STRATEGY] = strategy;
        (void)prim->SetAttrs(attrs);
        MS_LOG(INFO) << "node " << node->ToString() << " prim " << prim->name() << " batch parallel strategy is "
                     << attrs[GEN_STRATEGY]->ToString();
        strategyPtr = NewStrategy(0, *strategy_v_ptr);
      } else if (load_strategy_from_ckpt) {
        strategyPtr = stra_map[strategy_key_name];
      } else {
        strategyPtr = ExtractStrategy(attrs);
      }
      if (strategyPtr != nullptr) {
        if (operator_->Init(strategyPtr) == FAILED) {
          MS_LOG(EXCEPTION) << "Failure: operator " << prim->name() << " init failed";
        }
        cnode->set_user_data<OperatorInfo>(operator_);
      } else {
        MS_LOG(EXCEPTION) << "Error: strategy_ptr is nullptr";
      }
    }
  }
}
TensorLayout GetInputLayoutFromCNode(const std::pair<AnfNodePtr, int> &node_pair) {
  CNodePtr cnode = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  MS_EXCEPTION_IF_NULL(distribute_operator);
  int index = node_pair.second;
  if (index > SizeToInt(distribute_operator->inputs_tensor_info().size())) {
    MS_LOG(EXCEPTION) << "The index is out of range: the input index is " << index - 1 << ", but the vector size is "
                      << distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensorlayout_in = tensorinfo_in.tensor_layout();
  return tensorlayout_in;
}
// If reshape's output connects to several primitives, return the first layout found.
std::shared_ptr<TensorLayout> FindNextLayout(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  MS_EXCEPTION_IF_NULL(cnode->func_graph());
  FuncGraphManagerPtr manager = cnode->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[cnode];
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    MS_LOG(INFO) << "FindNextLayout prim " << node_prim->name();
    if (node_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_apply) && use_apply->has_user_data<OperatorInfo>()) {
      MS_LOG(INFO) << "FindNextLayout success prim " << node_prim->name();
      auto layout = GetInputLayoutFromCNode(node_pair);
      return std::make_shared<TensorLayout>(layout);
    }
    MS_LOG(DEBUG) << "FindNextLayout failed prim " << node_prim->name() << " " << IsParallelCareNode(use_apply)
                  << " " << use_apply->has_user_data<OperatorInfo>();
    auto layout_ptr = FindNextLayout(use_apply);
    if (layout_ptr) {
      return layout_ptr;
    }
  }
  MS_LOG(WARNING) << "FindNextLayout return nullptr; if reshape is not the last primitive, there must be some error";
  return nullptr;
}
std::shared_ptr<TensorLayout> GetOutputLayoutFromCNode(const CNodePtr &cnode, size_t output_index) {
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  MS_EXCEPTION_IF_NULL(distribute_operator);
  // the index must be strictly smaller than the size, otherwise the access below is out of range
  if (distribute_operator->outputs_tensor_info().size() <= output_index) {
    MS_LOG(EXCEPTION) << "outputs_tensor_info size is " << distribute_operator->outputs_tensor_info().size()
                      << ", must be greater than output_index " << output_index;
  }
  TensorInfo tensorinfo_out = distribute_operator->outputs_tensor_info()[output_index];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  return std::make_shared<TensorLayout>(tensorlayout_out);
}
std::shared_ptr<TensorLayout> FindPrevParallelCareNodeLayout(const AnfNodePtr &node, size_t output_index) {
  if (!node->isa<CNode>()) {
    return nullptr;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return nullptr;
  }
  if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
    auto layout_ptr = GetOutputLayoutFromCNode(cnode, output_index);
    if (!layout_ptr) {
      MS_LOG(EXCEPTION) << "Failure: GetOutputLayoutFromCNode failed";
    }
    return layout_ptr;
  }
  return nullptr;
}
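// Build a data-parallel layout for a parameter: device matrix {dev_num} and a
// tensor map that splits only the first dimension. Illustration (hypothetical
// numbers, not from this file): with 8 devices and a parameter of shape
// [64, 32], the tensor map is [0, -1] and the resulting slice shape is [8, 32].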
std::shared_ptr<TensorLayout> CreateParameterLayout(const AnfNodePtr &node) {
  // Create a DataParallel tensor layout for the parameter (supports WideDeep).
  CheckGlobalDeviceManager();
  int32_t dev_num = SizeToInt(g_device_manager->GetDeviceListByStageId(0).size());
  TensorLayout input_tensor_layout;
  // create input_shape
  Shapes inputs_shape = GetNodeShape(node);
  Shape input_shape_array = inputs_shape[0];
  if (input_shape_array.empty()) {
    MS_LOG(EXCEPTION) << "Don't support reshaping a scalar parameter.";
  }
  // create tensor_map
  size_t shape_size = input_shape_array.size();
  TensorMap input_tensor_map_array(SizeToInt(shape_size) - 1, -1);
  input_tensor_map_array.insert(input_tensor_map_array.begin(), 0);
  // create dev_matrix
  Shape dev_matrix_array = {dev_num};
  if (input_tensor_layout.InitFromVector(dev_matrix_array, input_tensor_map_array, input_shape_array) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Create tensor layout for parameter failed.";
  }
  return std::make_shared<TensorLayout>(input_tensor_layout);
}
std::shared_ptr<TensorLayout> FindPrevLayout(const AnfNodePtr &node) {
  if (node->isa<Parameter>()) {
    return CreateParameterLayout(node);
  }
  if (!node->isa<CNode>()) {
    return nullptr;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return nullptr;
  }
  if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
    auto layout_ptr = GetOutputLayoutFromCNode(cnode, 0);
    if (!layout_ptr) {
      MS_LOG(EXCEPTION) << "Failure: GetOutputLayoutFromCNode failed";
    }
    return layout_ptr;
  }
  ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  if (prim->name() == TUPLE_GETITEM) {
    auto tuple_index = GetTupleGetItemIndex(cnode);
    auto layout_ptr = FindPrevParallelCareNodeLayout(cnode->input(1), IntToSize(tuple_index));
    if (!layout_ptr) {
      MS_LOG(EXCEPTION)
        << "Failure: FindPrevLayout failed, there is a tuple_getitem before the reshape, but there does not exist a "
           "parallel-care node before the tuple_getitem!";
    }
    return layout_ptr;
  }
  for (size_t index = 0; index < cnode->inputs().size(); ++index) {
    if (prim->name() == DEPEND && index != 1) {
      continue;
    }
    auto layout_ptr = FindPrevLayout(cnode->inputs()[index]);
    if (!layout_ptr) {
      continue;
    }
    return layout_ptr;
  }
  MS_LOG(WARNING) << "FindPrevLayout return nullptr; if reshape is not the first primitive, there must be some error";
  return nullptr;
}
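// Reshape has no strategy of its own: its input layout is inherited from the
// previous parallel-care node (or parameter) and its output layout from the
// next one; the ReshapeInfo is then initialized with those layouts.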
void ReshapeInit(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>()) {
      continue;
    }
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->user_data<OperatorInfo>();
    if (operator_info == nullptr) {
      MS_LOG(EXCEPTION) << "Failure: Primitive " << prim->ToString() << " OperatorInstance is nullptr";
    }
    if (prim->name() != RESHAPE) {
      continue;
    }
    auto attrs = prim->attrs();
    if (StrategyFound(attrs)) {
      MS_LOG(EXCEPTION) << "Setting a strategy for Reshape has no effect!";
    }
    MS_ASSERT(cnode->inputs().size() == 3);
    auto prev_layout_ptr = FindPrevLayout(cnode->input(1));
    if (prev_layout_ptr) {
      auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
      reshape_info_ptr->SetInputLayout(*prev_layout_ptr);
    }
    auto next_layout_ptr = FindNextLayout(cnode);
    if (next_layout_ptr) {
      auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
      reshape_info_ptr->SetOutputLayout(*next_layout_ptr);
    }
    if (operator_info->Init(nullptr) == FAILED) {
      MS_LOG(EXCEPTION) << "Failure: operator " << prim->ToString() << " init failed";
    }
  }
}
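// Walk back from the graph's return node to the loss cnode, skipping a
// trailing Cast without operator info and looking through tuple_getitem;
// returns nullptr when the return value is not a cnode.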
CNodePtr FindLossCNode(const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(func_graph);
  CNodePtr return_node = func_graph->get_return();
  MS_EXCEPTION_IF_NULL(return_node);
  if (return_node->size() < 2) {
    MS_LOG(EXCEPTION) << "Failure: " << return_node->ToString() << " size is smaller than 2";
  }
  AnfNodePtr pre_node = return_node->input(1);
  MS_EXCEPTION_IF_NULL(pre_node);
  auto pre_cnode = pre_node->cast<CNodePtr>();
  if (pre_cnode == nullptr) {
    return nullptr;
  }
  auto current_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  // return -> cast
  if (current_prim->name() == CAST && !pre_cnode->has_user_data<OperatorInfo>()) {
    pre_cnode = pre_cnode->input(1)->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(pre_cnode);
    current_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  }
  // notice: the GetNext op has no input
  if (INVALID_LOSS_OPS.find(current_prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(INFO) << "The loss is: " << current_prim->name();
    return pre_cnode;
  }
  // the size of a common cnode is larger than 1
  if (pre_cnode->size() < 2) {
    MS_LOG(EXCEPTION) << pre_cnode->ToString() << " size ( " << pre_cnode->inputs().size() << " ) is smaller than 2";
  }
  // return -> tuple_getitem -> loss
  if (current_prim->name() == TUPLE_GETITEM) {
    AnfNodePtr pre_pre_node = pre_cnode->input(1);
    MS_EXCEPTION_IF_NULL(pre_pre_node);
    auto pre_pre_cnode = pre_pre_node->cast<CNodePtr>();
    auto value = pre_pre_cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(value);
    PrimitivePtr prim = value->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(prim);
    MS_LOG(DEBUG) << "The loss name is " << prim->name();
    return pre_pre_cnode;
  }
  // return -> make_tuple
  if (current_prim->name() == MAKE_TUPLE) {
    MS_LOG(EXCEPTION) << "The loss contains make_tuple, which is not supported";
  }
  // return -> loss
  MS_LOG(DEBUG) << "The loss name is " << current_prim->name();
  return pre_cnode;
}
TensorLayouts GetLossNodeGradOutputLayout(const CNodePtr &loss_cnode) {
  TensorLayouts ret;
  MS_EXCEPTION_IF_NULL(loss_cnode);
  AnfNodePtr node = loss_cnode->cast<AnfNodePtr>();
  MS_EXCEPTION_IF_NULL(node);
  LossNodeInfo node_info = GetLossNodeInfo(node);
  ValueNodePtr prim_anf_node = loss_cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(prim_anf_node);
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(prim);
  if (INVALID_LOSS_OPS.find(prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(WARNING) << "The loss name is: " << prim->name() << ", do nothing for split sens now";
    return ret;
  }
  OperatorInfoPtr operator_info = loss_cnode->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(operator_info);
  TensorInfo loss_grad_tensor_info;
  size_t op_output_size = operator_info->outputs_tensor_info().size();
  MS_LOG(INFO) << "The loss name is " << operator_info->name() << ", has_tuple_getitem is "
               << node_info.has_tuple_getitem << ", the output size is " << op_output_size << ", the dout_index is "
               << node_info.dout_index;
  if ((op_output_size == 0) || (op_output_size <= IntToSize(node_info.dout_index))) {
    MS_LOG(EXCEPTION) << "The index is " << node_info.dout_index << ", but the size of outputs is " << op_output_size;
  }
  if (!node_info.has_tuple_getitem && (op_output_size > 1)) {
    MS_LOG(EXCEPTION) << "Currently, a tuple sens is not supported.";
  }
  loss_grad_tensor_info = operator_info->outputs_tensor_info()[IntToSize(node_info.dout_index)];
  ret.push_back(loss_grad_tensor_info.tensor_layout());
  return ret;
}
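// Slice the user-provided sens (gradient seed) tensor according to the loss
// output layout: a scalar-like sens is left intact, a Parameter sens gets its
// abstract shape rewritten to the slice shape, and a Tensor value node is
// split by inserting a _GetTensorSlice operator.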
void SplitSens(const CNodePtr &grad_sens_node, const TensorLayout &loss_grad_layout) {
  MS_EXCEPTION_IF_NULL(grad_sens_node);
  if (grad_sens_node->size() <= 1) {
    MS_LOG(EXCEPTION) << "The size of grad sens node is smaller than 2";
  }
  AnfNodePtr sens_tensor_node = grad_sens_node->input(1);
  MS_EXCEPTION_IF_NULL(sens_tensor_node);
  Shapes sens_shapes = GetNodeShape(sens_tensor_node);
  if (sens_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "GetNodeShape for sens_tensor_node, output size is not 1";
  }
  // If the shape of the sens tensor is [] or [1], there is no need to split it.
  Shape sens_shape = sens_shapes[0];
  if (sens_shape.empty() || ((sens_shape.size() == 1) && (sens_shape[0] == 1))) {
    if (sens_tensor_node->isa<Parameter>()) {
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      sens_tensor_param->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(loss_grad_layout));
    }
    MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", no need to split sens";
    return;
  }
  auto loss_shape = loss_grad_layout.tensor_shape().array();
  if (loss_shape != sens_shape) {
    MS_LOG(EXCEPTION) << "The shape of sens is not equal to the loss output, which is unsupported now. Sens shape is "
                      << ShapeToString(sens_shape) << ", loss shape is " << ShapeToString(loss_shape);
  }
  MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", split it.";
  if (!IsValueNode<Tensor>(sens_tensor_node)) {
    if (sens_tensor_node->isa<Parameter>()) {
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      AbstractBasePtr abstract = sens_tensor_node->abstract();
      MS_EXCEPTION_IF_NULL(abstract);
      auto slice_shape = loss_grad_layout.slice_shape().array();
      std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
      MS_EXCEPTION_IF_NULL(parallel_shape);
      auto cloned_abstract = abstract->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(parallel_shape);
      sens_tensor_node->set_abstract(cloned_abstract);
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      sens_tensor_param->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(loss_grad_layout));
      return;
    }
    MS_LOG(EXCEPTION) << "The type of the sens node is neither Tensor nor Parameter, which is unsupported now.";
  }
  // Use the _GetTensorSlice operator to split the sens tensor
  FuncGraphPtr func_graph = grad_sens_node->func_graph();  // only a cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(loss_grad_layout);
  InsertGetTensorSliceOp(op, grad_sens_node, func_graph, 1, SPLIT_SENS);
}
void InsertForwardOps(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorVector forward_op = distribute_operator->forward_op();
  if (!forward_op.empty()) {
    MS_LOG(INFO) << "Insert forward op for " << distribute_operator->name();
    ForwardCommunication(forward_op, cnode);
  }
}
void StepReplace(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  // StepReplaceOp
  OperatorVector replace_op = distribute_operator->replace_op();
  if (!replace_op.empty()) {
    MS_LOG(INFO) << "StepReplaceOp " << cnode->ToString();
    StepReplaceOp(replace_op, cnode);
  }
  // StepReplaceGraph: after calling StepReplaceGraph, cnode cannot be used anymore.
  ReplaceGraphPtr replace_graph = distribute_operator->replace_graph(cnode);
  if (!replace_op.empty() && replace_graph) {
    MS_LOG(EXCEPTION) << "Only one of replace_op and replace_graph can be used";
  }
  if (replace_graph) {
    MS_LOG(INFO) << "StepReplaceGraph " << cnode->ToString();
    StepReplaceGraph(replace_graph, cnode);
  }
}
void HandleDropoutNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  std::string op_name = distribute_operator->name();
  if (op_name.find(DROPOUT_DO_MASK) == std::string::npos) {
    return;
  }
  DropoutDoMaskInfoPtr dropout_do_mask = std::dynamic_pointer_cast<DropoutDoMaskInfo>(distribute_operator);
  MS_EXCEPTION_IF_NULL(dropout_do_mask);
  std::vector<Operator> replace_op = dropout_do_mask->GetDropoutGenMaskReplaceOp(cnode);
  if (replace_op.empty()) {
    MS_LOG(DEBUG) << "No need to replace dropout_gen_mask";
    return;
  }
  if (cnode->inputs().size() != DROPOUT_DO_MASK_CNODE_INPUT_SIZE) {
    MS_LOG(EXCEPTION) << "The input size of the dropout_do_mask cnode is not " << DROPOUT_DO_MASK_CNODE_INPUT_SIZE;
  }
  ReplaceOneOp(replace_op[0], cnode->input(DROPOUT_GEN_MASK_INDEX)->cast<CNodePtr>());
}
void HandleTileNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  if (cnode->size() < 3 || !IsValueNode<Primitive>(cnode->input(0))) {
    return;
  }
  auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
  if (prim->name() != TILE) {
    return;
  }
  TileInfoPtr tile = std::dynamic_pointer_cast<TileInfo>(distribute_operator);
  MS_EXCEPTION_IF_NULL(tile);
  tile->UpdateMultiples(cnode);
}
void HandleSpecialNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  HandleDropoutNode(distribute_operator, cnode);
  HandleTileNode(distribute_operator, cnode);
}
std::set<FuncGraphPtr> FindForwardGraphByRootNodes(const AnfNodeSet &root_all_nodes) {
  // J->CNode->Graph
  std::set<FuncGraphPtr> graph_set;
  for (auto &node : root_all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if ((cnode->size() < 2) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    auto expect_j_prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    if (expect_j_prim->name() != J) {
      continue;
    }
    if (IsValueNode<FuncGraph>(cnode->input(1))) {
      auto graph = GetValueNode<FuncGraphPtr>(cnode->input(1));
      MS_LOG(DEBUG) << "Find the forward graph success";
      graph_set.insert(graph);
    }
  }
  return graph_set;
}
void StepSplitSens(const std::pair<CNodePtr, CNodePtr> &sens_loss_pair) {
  CNodePtr sens_node = sens_loss_pair.first;
  CNodePtr loss_node = sens_loss_pair.second;
  auto loss_grad_layout = GetLossNodeGradOutputLayout(loss_node);
  if (!loss_grad_layout.empty()) {
    SplitSens(sens_node, loss_grad_layout[0]);
  }
}
// Sens node satisfies the following conditions: cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
std::vector<std::pair<CNodePtr, CNodePtr>> GetSensLossPairs(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  std::vector<std::pair<CNodePtr, CNodePtr>> sens_loss_pairs;
  for (auto &node : root->nodes()) {
    if (!node->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)
    auto sens_cnode = node->cast<CNodePtr>();
    AnfNodePtr expect_tuple_getitem = sens_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_tuple_getitem);
    if (!expect_tuple_getitem->isa<CNode>()) {
      continue;
    }
    auto expect_tuple_getitem_cnode = expect_tuple_getitem->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_tuple_getitem_cnode, TUPLE_GETITEM)) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode
    AnfNodePtr expect_anonymous = expect_tuple_getitem_cnode->input(1);
    MS_EXCEPTION_IF_NULL(expect_anonymous);
    if (!expect_anonymous->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
    auto expect_anonymous_cnode = expect_anonymous->cast<CNodePtr>();
    AnfNodePtr expect_j = expect_anonymous_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_j);
    if (!expect_j->isa<CNode>()) {
      continue;
    }
    auto expect_j_cnode = expect_j->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_j_cnode, J)) {
      continue;
    }
    if (!IsValueNode<FuncGraph>(expect_j_cnode->input(1))) {
      MS_LOG(EXCEPTION) << "Sens can't find the corresponding graph.";
    }
    auto func_graph = GetValueNode<FuncGraphPtr>(expect_j_cnode->input(1));
    auto loss_cnode = FindLossCNode(func_graph);
    if (loss_cnode == nullptr) {
      MS_LOG(WARNING) << "Can not find the loss cnode";
      continue;
    }
    std::pair<CNodePtr, CNodePtr> sens_loss_pair = std::make_pair(sens_cnode, loss_cnode);
    sens_loss_pairs.push_back(sens_loss_pair);
  }
  return sens_loss_pairs;
}
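// Two passes over the graph: first split sens and insert forward,
// redistribution and backward communication for every parallel-care cnode
// (and split tensor value nodes); then run the StepReplace pass, which may
// rewrite or replace the cnodes themselves.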
void ParallelCommunication(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes,
                           const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(manager);
  TensorRedistribution tensor_redistribution;
  std::vector<std::pair<CNodePtr, CNodePtr>> sens_loss_pairs = GetSensLossPairs(root);
  bool has_backward = !sens_loss_pairs.empty();
  // splitting sens must happen before inserting the operators
  for (auto &pair : sens_loss_pairs) {
    // If the shape of the grad-sens tensor is not [] or [1], use get tensor slice to handle it.
    // If the type of the sens node is not Tensor, it is unsupported now; do nothing by default.
    StepSplitSens(pair);
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      // make_tuple is a parallel-care node, but it may not have operator info
      if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>()) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      MS_EXCEPTION_IF_NULL(distribute_operator);
      // insert forward ops
      InsertForwardOps(distribute_operator, cnode);
      // insert redistribution ops
      StepRedistribution(cnode, distribute_operator, cnode, tensor_redistribution, cnode);
      // insert backward ops
      if (has_backward) {
        BackwardCommunication(distribute_operator, cnode, sens_loss_pairs);
      }
      HandleSpecialNode(distribute_operator, cnode);
    } else if (IsValueNode<Tensor>(node)) {
      StepSplitTensor(node, manager);
    }
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>()) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      MS_EXCEPTION_IF_NULL(distribute_operator);
      // StepReplace
      StepReplace(distribute_operator, cnode);
    }
  }
}
namespace {
void RevertSymbolicKeyInstance(const FuncGraphPtr &root, const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(node);
  auto symbolic_key = GetValueNode<SymbolicKeyInstancePtr>(node);
  MS_EXCEPTION_IF_NULL(symbolic_key);
  auto all_upstream_node = root->manager()->node_users()[node];
  for (auto &upstream_node : all_upstream_node) {
    FuncGraphPtr fg = upstream_node.first->func_graph();
    if (symbolic_key->node()->isa<Parameter>()) {
      for (auto &param : root->parameters()) {
        if (*param == *symbolic_key->node()) {
          AnfNodePtr reverted_node = root->NewCNode({NewValueNode(prim::kPrimEmbed), param});
          MS_EXCEPTION_IF_NULL(reverted_node);
          MS_LOG(DEBUG) << "before replace " << node->ToString() << " to node " << reverted_node->DebugString();
          (void)fg->manager()->Replace(node, reverted_node);
          MS_LOG(DEBUG) << "revert node " << node->ToString() << " to node " << reverted_node->DebugString();
        }
      }
    }
  }
}
}  // namespace
void HandleSymbolicKeyInstance(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &node : all_nodes) {
    // revert SymbolicKeyInstance back to the embed() primitive
    if (IsValueNode<SymbolicKeyInstance>(node)) {
      RevertSymbolicKeyInstance(root, node);
      continue;
    }
  }
}
std::vector<std::pair<std::string, int>> NodeParameterName(const CNodePtr &node) {
  std::vector<AnfNodePtr> node_inputs{node->inputs()};
  std::vector<std::pair<std::string, int>> param_names;
  for (int i = 0; i < UintToInt(node_inputs.size()); ++i) {
    auto input = node_inputs[i];
    if (input->isa<Parameter>()) {
      auto input_parameter = input->cast<ParameterPtr>();
      if (input_parameter->has_default()) {
        if (ParameterRequireGrad(input_parameter)) {
          param_names.push_back({input_parameter->name(), i});
        }
      }
    }
  }
  return param_names;
}
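// Persist the chosen strategies (plus GatherV2/EmbeddingLookup manual-split
// shapes) keyed by the first trainable parameter name feeding each operator,
// so that later trainings can reload them via the strategy checkpoint.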
void CheckpointStrategy(const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(func_graph);
  MS_LOG(DEBUG) << "Save strategy to checkpoint begin";
  StrategyMap stra_map;
  TensorInfoMap tensor_info_map;
  ManualShapeMap manual_shape_map;
  auto ret = func_graph->get_return();
  auto all_nodes = DeepScopedGraphSearch(ret);
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    auto param_names = NodeParameterName(cnode);
    if (param_names.empty()) {
      continue;
    }
    string param_name = param_names[0].first;
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->user_data<OperatorInfo>();
    if (operator_info) {
      if (operator_info->name().find(RESHAPEINFO) != std::string::npos) {
        continue;
      }
      std::vector<TensorInfo> input_tensor_info = operator_info->inputs_tensor_info();
      StrategyPtr strategyPtr = operator_info->strategy();
      MS_EXCEPTION_IF_NULL(node->scope());
      stra_map[param_name] = strategyPtr;
      for (auto param_name_pair : param_names) {
        if (param_name_pair.second - 1 >= UintToInt(input_tensor_info.size())) {
          continue;
        }
        tensor_info_map[param_name_pair.first] = input_tensor_info[param_name_pair.second - 1];
      }
      if (operator_info->name().find(EMBEDDING_LOOKUP) != std::string::npos ||
          operator_info->name().find(GATHERV2) != std::string::npos) {
        auto gatherv2_info = std::dynamic_pointer_cast<GatherV2PInfo>(operator_info);
        auto param_split_shapes = gatherv2_info->param_split_shapes();
        auto index_offsets = gatherv2_info->index_offsets();
        if (param_split_shapes.size() != index_offsets.size()) {
          MS_LOG(EXCEPTION) << "In manual split, the lengths of param_split_shapes and index_offsets should be the same.";
        }
        std::vector<std::pair<int32_t, int32_t>> manual_shape;
        for (int i = 0; i < UintToInt(param_split_shapes.size()); ++i) {
          manual_shape.push_back({param_split_shapes[i], index_offsets[i]});
        }
        manual_shape_map[param_name] = manual_shape;
      }
    }
  }
  if (StrategyCheckpoint::GetInstance().Save(stra_map, tensor_info_map, &manual_shape_map) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Save strategy checkpoint failed";
  }
}
void SetForwardFlag(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    MS_LOG(DEBUG) << "Set forward flag " << cnode->DebugString() << ".";
    cnode->set_in_forward_flag(true);
  }
}
void SetForwardFlag(const AnfNodeSet &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    cnode->set_in_forward_flag(true);
  }
}
std::set<FuncGraphPtr> ForwardGraph(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  const auto &all_nodes = root->nodes();
  std::set<FuncGraphPtr> graph_set = FindForwardGraphByRootNodes(all_nodes);
  return graph_set;
}
std::vector<AnfNodePtr> FindRootForwardCNode(const FuncGraphPtr &graph, const AnfNodeSet &all_nodes) {
  MS_EXCEPTION_IF_NULL(graph);
  std::vector<AnfNodePtr> root_forward_nodes;
  auto loss_cnode = FindLossCNode(graph);
  if (loss_cnode == nullptr) {
    MS_LOG(WARNING) << "Can not find the loss cnode";
    return root_forward_nodes;
  }
  auto loss_cnode_id = loss_cnode->UniqueIdThroughCopy();
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    auto root_node_id = node->UniqueIdThroughCopy();
    if (loss_cnode_id == root_node_id) {
      root_forward_nodes = DeepLinkedGraphSearch(cnode);
      break;
    }
  }
  return root_forward_nodes;
}
void MarkForwardCNode(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto all_nodes = root->nodes();
  std::set<FuncGraphPtr> graph_set = FindForwardGraphByRootNodes(all_nodes);
  if (graph_set.empty()) {
    MS_LOG(INFO) << "Can not find the forward graph, so mark the ops in the root graph";
    SetForwardFlag(all_nodes);
  } else {
    for (auto &func_graph : graph_set) {
      MS_LOG(INFO) << "The sub graph size of root is " << root->func_graphs_used().size();
      auto return_node = func_graph->get_return();
      MS_EXCEPTION_IF_NULL(return_node);
      auto all_dfs_nodes = DeepLinkedGraphSearch(return_node);
      SetForwardFlag(all_dfs_nodes);
      auto root_forward_nodes = FindRootForwardCNode(func_graph, all_nodes);
      if (root_forward_nodes.empty()) {
        continue;
      }
      // Mark the forward flag for the nodes in the root graph.
      SetForwardFlag(root_forward_nodes);
    }
  }
}
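// Resolve the device number and global rank (from the parallel context, or
// from the communication layer when they were not set explicitly) and
// initialize the device manager for the selected backend (HCCL or NCCL).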
Status ParallelInit() {
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  int32_t device_num = ParallelContext::GetInstance()->device_num();
  int32_t global_rank = ParallelContext::GetInstance()->global_rank();
  std::string backend = ParallelContext::GetInstance()->communication_backend();
  std::string world_group;
  if (backend == HCCL_BACKEND) {
    world_group = HCCL_WORLD_GROUP;
  } else if (backend == NCCL_BACKEND) {
    world_group = NCCL_WORLD_GROUP;
  } else {
    MS_LOG(EXCEPTION) << "Invalid communication backend: " << backend;
  }
  uint32_t world_rank_size = 0;
  if (!ParallelContext::GetInstance()->device_num_is_set()) {
    if (!CommManager::GetInstance().GetRankSize(world_group, &world_rank_size)) {
      MS_LOG(EXCEPTION) << "Get rank size failed";
    }
    device_num = UintToInt(world_rank_size);
    MS_LOG(INFO) << "Get device num from the communication module, the device num is " << device_num;
  }
  uint32_t rank_id = 0;
  if (!ParallelContext::GetInstance()->global_rank_is_set()) {
    if (!CommManager::GetInstance().GetRankID(world_group, &rank_id)) {
      MS_LOG(EXCEPTION) << "Get rank id failed";
    }
    global_rank = UintToInt(rank_id);
    MS_LOG(INFO) << "Get global rank from the communication module, the global rank is " << global_rank;
  }
  if (!InitDevice(device_num, global_rank, backend)) {
    MS_LOG(ERROR) << "Init device failed";
    return FAILED;
  }
  MS_LOG(INFO) << "The parallel context: dev num: " << device_num << ", global rank: " << global_rank
               << ", backend: " << backend << ", mirror_mean: " << ParallelContext::GetInstance()->mirror_mean()
               << ", cast_before_mirror: " << ParallelContext::GetInstance()->cast_before_mirror();
  return SUCCESS;
}
void HandleForwardMakeTuple(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    if (!AnfNodeIsPrimitive(node, MAKE_TUPLE)) {
      continue;
    }

    auto cnode = node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!cnode->in_forward_flag()) {
      continue;
    }

    FuncGraphManagerPtr manager = cnode->func_graph()->manager();
    MS_EXCEPTION_IF_NULL(manager);
    auto make_tuple_user = manager->node_users()[cnode];
    if (make_tuple_user.size() != 1) {
      MS_LOG(EXCEPTION) << "Currently the make_tuple must have exactly 1 user, but got " << make_tuple_user.size();
    }

    CNodePtr make_tuple_next_cnode = make_tuple_user.pop().first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(make_tuple_next_cnode);
    std::string make_tuple_user_prim_name = GetPrimName(make_tuple_next_cnode);
    if (!IsParallelCareNode(make_tuple_next_cnode)) {
      MS_LOG(INFO) << "The make_tuple's user is " << make_tuple_user_prim_name << ", no need to set operator info";
      continue;
    }
    if (make_tuple_next_cnode->inputs().size() != 2) {
      MS_LOG(EXCEPTION) << "Currently the make_tuple's user only supports 1 input, but got "
                        << make_tuple_next_cnode->inputs().size() - 1;
    }

    // Share the user's operator info with the make_tuple itself.
    MS_LOG(INFO) << "Set the make_tuple's operator info, and the op name is " << make_tuple_user_prim_name;
    OperatorInfoPtr op_info = GetDistributeOperator(make_tuple_next_cnode);
    MS_EXCEPTION_IF_NULL(op_info);
    cnode->set_user_data<OperatorInfo>(op_info);
  }
}
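
// Entry point of the (semi-)auto parallel pass. It marks forward nodes,
// extracts shapes and strategies, shares operator info across make_tuple
// nodes, covers tensor shapes with slice shapes, and finally inserts the
// forward/backward communication operators. It always returns false; re-runs
// are prevented by the SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY flag instead.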
bool StepParallel(const FuncGraphPtr &root, const opt::OptimizerPtr &optimizer) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(optimizer);
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  std::string parallel_mode = ParallelContext::GetInstance()->parallel_mode();
  // Assume no change to the graph.
  bool changes = false;
  // Skip the pass unless the graph is flagged for (semi-)auto parallel and the pass has not run yet.
  if (!root->has_flag(AUTO_PARALLEL) || ((parallel_mode != AUTO_PARALLEL) && (parallel_mode != SEMI_AUTO_PARALLEL)) ||
      (root->has_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY))) {
    if (!root->has_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY)) {
      if (HasStrategy(root)) {
        MS_LOG(INFO) << "Strategies ignored in " << parallel_mode
                     << ", set_strategy() is only valid in [semi_]auto_parallel.";
      }
      root->set_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY, true);
    }
    return changes;
  }

  struct timeval start_time, end_time;
  (void)gettimeofday(&start_time, nullptr);

  MS_LOG(INFO) << "Now entering step parallel";
  DumpGraph(root, std::string(STEP_PARALLEL_BEGIN));

  pipeline::ResourceBasePtr res = optimizer->resource();
  MS_EXCEPTION_IF_NULL(res);
  FuncGraphManagerPtr manager = res->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);
  std::reverse(all_nodes.begin(), all_nodes.end());

  if (parallel_mode != AUTO_PARALLEL) {
    TOTAL_OPS = 0;
    if (ParallelInit() != SUCCESS) {
      MS_LOG(EXCEPTION) << "Parallel init failed";
    }

    // Mark the forward cnodes; the parallel pass only cares about these nodes.
    MarkForwardCNode(root);
    if (FindCommunicationOp(all_nodes)) {
      MS_LOG(EXCEPTION) << "The graph already contains a communication op";
    }

    // Extract shapes and strategies, and set operator_info.
    ExtractInformation(all_nodes);
    ReshapeInit(all_nodes);
  }

  HandleForwardMakeTuple(all_nodes);

  // Save the strategies as a checkpoint for multi-train.
  if (StrategyCheckpoint::GetInstance().SaveCheckPointOn()) {
    CheckpointStrategy(root);
  }

  HandleSymbolicKeyInstance(root, all_nodes);

  // Replace tensor shapes with their slice shapes.
  CoverSliceShape(root);

  // Set the shape for the optimizer's cloned tensors.
  SetClonedTensorShapeForOptimizer(root);

  // ForwardCommunication, BackwardCommunication and TensorRedistribution.
  ParallelCommunication(root, all_nodes, manager);

  DumpGraph(root, std::string(STEP_PARALLEL_END));

  // Step parallel only runs once.
  root->set_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY, true);
  res->results()[pipeline::kStepParallelGraph] = root;

  // In auto parallel mode, there is no need to check whether strategies are set.
  root->set_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY, true);

  // Elapsed time in microseconds: the seconds difference scaled by
  // kUSecondInSecond, plus the microseconds difference.
  (void)gettimeofday(&end_time, nullptr);
  uint64_t time = kUSecondInSecond * static_cast<uint64_t>(end_time.tv_sec - start_time.tv_sec);
  time += static_cast<uint64_t>(end_time.tv_usec - start_time.tv_usec);
  MS_LOG(INFO) << "Now leaving step parallel, used time: " << time << " us";
  return changes;
}
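
// Collect the UniqueId of the given cnode followed by the UniqueIds of its
// real inputs (input 0, the primitive, is skipped). A hypothetical example:
// for a cnode built as add = Add(x, y), the result is { id(add), id(x), id(y) }.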
// Needed by rec_parser.
std::vector<std::string> ExtractInputsTensorName(const CNodePtr &node) {
  std::vector<std::string> name_inputs;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  // Skip input 0, which is the primitive itself.
  std::vector<AnfNodePtr> node_inputs{all_inputs.begin() + 1, all_inputs.end()};

  std::string node_id = node->UniqueId();
  name_inputs.push_back(node_id);
  for (auto &input : node_inputs) {
    std::string name = input->UniqueId();
    name_inputs.push_back(name);
  }
  return name_inputs;
}
}  // namespace parallel
}  // namespace mindspore