
step_parallel.cc 93 kB

/**
 * Copyright 2019 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "parallel/step_parallel.h"
#include <inttypes.h>
#include <sys/time.h>
#include <algorithm>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <unordered_map>
#include <utility>
#include "ir/meta_tensor.h"
#include "operator/ops.h"
#include "optimizer/optimizer.h"
#include "parallel/auto_parallel/graph_costmodel.h"
#include "parallel/context.h"
#include "parallel/device_manager.h"
#include "parallel/dynamic_creator.h"
#include "parallel/graph_util/generate_graph.h"
#include "parallel/graph_util/graph_info.h"
#include "parallel/graph_util/node_info.h"
#include "parallel/node_check.h"
#include "parallel/ops_info/matmul_info.h"
#include "parallel/strategy_checkpoint/parallel_strategy_checkpoint.h"
#include "utils/comm_manager.h"
#include "utils/symbolic.h"

using mindspore::tensor::Tensor;

namespace mindspore {
namespace parallel {
static const std::set<std::string> COMMUNICATION_OPS = {ALL_REDUCE, ALL_GATHER, ALL_TO_ALL, REDUCE_SCATTER};
static const std::set<std::string> INVALID_LOSS_OPS = {GET_NEXT, VIRTUALLOSS};

// g_RefMap: if input i of CNode B is a RefKey[Parameter C], the map holds one
// entry with key C and value (B, i).
static std::map<AnfNodePtr, std::pair<AnfNodePtr, int>> g_RefMap;

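// If the primitive of a newly created communication node carries a 'group' attr
// (a hashed group name), resolve it to a readable rank list via the device
// manager and record it as the GROUP_RANKS attr.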
void SetCommunicationOpGroupLabel(std::vector<AnfNodePtr> new_node_input) {
  if (new_node_input.empty()) {
    return;
  }
  ValueNodePtr prim_anf_node = new_node_input[0]->cast<ValueNodePtr>();
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
  MS_EXCEPTION_IF_NULL(prim);
  auto attrs = prim->attrs();
  auto iter = attrs.find(GROUP);
  if (iter != attrs.end()) {
    auto value = iter->second;
    MS_EXCEPTION_IF_NULL(value);
    if (value->isa<StringImm>()) {
      std::string hash_name = value->cast<StringImmPtr>()->value();
      MS_EXCEPTION_IF_NULL(g_device_manager);
      std::string rank_list_name = g_device_manager->FindRankListNameByHashName(hash_name);
      (void)prim->AddAttr(GROUP_RANKS, MakeValue(rank_list_name));
    }
  }
}

std::vector<AnfNodePtr> CreateInput(const Operator &op, const AnfNodePtr &node, const std::string &instance_name) {
  MS_EXCEPTION_IF_NULL(node);
  OperatorArgs arg_forward = op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_forward.first, op.first, instance_name);
  MS_EXCEPTION_IF_NULL(pyop_instance);
  OperatorParams params = arg_forward.second;
  std::vector<AnfNodePtr> new_node_input = {NewValueNode(pyop_instance), node};
  if (!params.empty()) {
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      MS_EXCEPTION_IF_NULL(val);
      int32_t position = param.second;
      (void)new_node_input.insert(new_node_input.begin() + position, val);
    }
  }
  // if the op has a 'group' attr, set the rank list name for the op
  SetCommunicationOpGroupLabel(new_node_input);
  return new_node_input;
}

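// Build a CNode that applies 'op' to 'pre_node' and rewire input 'index' of
// 'node' to the new CNode, so the op is inserted on that edge of the graph.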
void InsertNode(const Operator &op, const CNodePtr &node, size_t index, const AnfNodePtr &pre_node,
                const FuncGraphPtr &func_graph, const std::string &instance_name) {
  // insert the new node before the given node
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  std::vector<AnfNodePtr> node_input = CreateInput(op, pre_node, instance_name);
  CNodePtr new_node = func_graph->NewCNode(node_input);
  MS_EXCEPTION_IF_NULL(new_node);
  if (instance_name.find(SPLIT_SENS) == std::string::npos) {
    new_node->set_in_forward_flag(true);  // mark forward flag
  }
  auto new_node_value = node_input[0]->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(new_node_value);
  PrimitivePtr new_node_prim = new_node_value->value()->cast<PrimitivePtr>();
  new_node_prim->set_instance_name(instance_name);
  new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
  new_node->set_scope(scope);
  node_input[0]->set_scope(scope);
  manager->SetEdge(node, SizeToInt(index), new_node);
}

std::string CreateInstanceName(const CNodePtr &node, size_t index) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsValueNode<Primitive>(node->input(0))) {
    MS_LOG(EXCEPTION) << "CreateInstanceName: " << node->ToString() << " doesn't have primitive";
  }
  std::string name_base = node->fullname_with_scope();
  std::string name = name_base + "_" + std::to_string(index);
  std::string instance_name = HashInstanceName(name);
  return instance_name;
}

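// Insert the forward communication operators (e.g. AllReduce) behind 'node':
// each op in 'forward_op' replaces all uses of the node (or of its single
// tuple_getitem user) with the newly created communication CNode.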
void ForwardCommunication(OperatorVector forward_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  // step1: get the graph manager and distribute_operator
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto uses_set = manager->node_users()[node];
  CNodePtr node_to_insert = node;
  for (auto &uses_pair : uses_set) {
    auto uses_cnode = uses_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(uses_cnode);
    if (!IsValueNode<Primitive>(uses_cnode->input(0))) {
      break;
    }
    PrimitivePtr value_node_prim = GetValueNode<PrimitivePtr>(uses_cnode->input(0));
    MS_EXCEPTION_IF_NULL(value_node_prim);
    if (value_node_prim->name() == TUPLE_GETITEM) {
      if (uses_set.size() > 1) {
        MS_LOG(EXCEPTION) << "Now only support one output, but got " << uses_set.size();
      }
      node_to_insert = uses_cnode;
    }
  }
  MS_EXCEPTION_IF_NULL(node_to_insert);
  std::reverse(forward_op.begin(), forward_op.end());
  // step2: traverse op_list and insert nodes
  for (size_t index = 0; index < forward_op.size(); ++index) {
    std::string instance_name_base = FORWARD_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(node, index);
    std::vector<AnfNodePtr> forward_input = CreateInput(forward_op[index], node_to_insert, instance_name);
    CNodePtr forward_node = func_graph->NewCNode(forward_input);  // use NewCNode to create the anfnode
    MS_EXCEPTION_IF_NULL(forward_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    forward_node->set_scope(scope);
    forward_node->set_in_forward_flag(true);
    forward_input[0]->set_scope(scope);
    (void)manager->Replace(node_to_insert, forward_node);  // use the Replace function to insert the node
  }
}

CNodePtr InsertMakeTuple(const AnfNodePtr &prev, uint32_t num, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(prev);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> make_tuple_inputs;
  make_tuple_inputs.push_back(NewValueNode(prim::kPrimMakeTuple));
  for (uint32_t i = 0; i < num; i++) {
    std::vector<AnfNodePtr> tuple_get_item_inputs{NewValueNode(prim::kPrimTupleGetItem), prev,
                                                  CreatInt32Imm(UintToInt(i))};
    auto tuple_get_item = func_graph->NewCNode(tuple_get_item_inputs);
    MS_EXCEPTION_IF_NULL(tuple_get_item);
    make_tuple_inputs.push_back(tuple_get_item);
  }
  auto make_tuple = func_graph->NewCNode(make_tuple_inputs);
  MS_EXCEPTION_IF_NULL(make_tuple);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  (void)manager->Replace(prev, make_tuple);
  return make_tuple;
}

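// Insert every operator of the redistribution list on input 'pos' of 'node';
// when the matching output-info entry flags a multi-output op, wrap the
// inserted node with tuple_getitem/make_tuple via InsertMakeTuple.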
void InsertRedistribution(const RedistributionOpListPtr &redistribution_oplist_ptr, const CNodePtr &node,
                          const FuncGraphPtr &func_graph, int pos, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(pre_node);
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if ((redistribution_oplist_ptr->first).size() != (redistribution_oplist_ptr->second).size()) {
    MS_LOG(EXCEPTION) << "The sizes of OperatorVector and OutPutInfoVector must be the same!";
  }
  for (size_t index = 0; index < (redistribution_oplist_ptr->first).size(); ++index) {
    if (pos >= SizeToInt(node->inputs().size())) {
      MS_LOG(EXCEPTION) << "InsertRedistribution: pos can't be larger than the size of node's inputs";
    }
    // Create the new node
    AnfNodePtr target_node = node->input(IntToSize(pos));
    MS_EXCEPTION_IF_NULL(target_node);
    // Create the instance_name
    auto op = (redistribution_oplist_ptr->first)[index];
    std::string op_name = (redistribution_oplist_ptr->first)[index].first;
    std::string instance_name_base = REDISTRIBUTION_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(pre_node, index) + op_name;
    InsertNode(op, node, IntToSize(pos), target_node, func_graph, instance_name);
    if ((redistribution_oplist_ptr->second)[index].first) {
      target_node = node->input(IntToSize(pos));
      MS_EXCEPTION_IF_NULL(target_node);
      (void)InsertMakeTuple(target_node, (redistribution_oplist_ptr->second)[index].second, func_graph);
    }
  }
}

void InsertGetTensorSliceOp(const Operator &op, const CNodePtr &node, const FuncGraphPtr &func_graph, int pos,
                            const std::string &instance_name) {
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: the graph is null, the instance name is " << instance_name;
  }
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if (pos >= SizeToInt(node->inputs().size())) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: pos can't be larger than the size of node's inputs, the instance "
                      << "name is " << instance_name;
  }
  // Create the new node
  AnfNodePtr pre_node = node->input(IntToSize(pos));
  MS_EXCEPTION_IF_NULL(pre_node);
  InsertNode(op, node, IntToSize(pos), pre_node, func_graph, instance_name);
}

TensorLayout GetTensorInLayout(const CNodePtr &middle_node, const PrimitivePtr &middle_prim,
                               const OperatorInfoPtr &distribute_operator) {
  TensorInfo tensorinfo_in;
  if (middle_prim->name() == TUPLE_GETITEM) {
    auto value_node = middle_node->input(2)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(value_node);
    size_t index_s = IntToSize(GetValue<int>(value_node->value()));
    if (index_s >= distribute_operator->outputs_tensor_info().size()) {
      MS_LOG(EXCEPTION) << "The index is out of range, index: " << index_s
                        << ", vector size: " << distribute_operator->outputs_tensor_info().size();
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[index_s];
  } else {
    if (distribute_operator->outputs_tensor_info().empty()) {
      MS_LOG(EXCEPTION) << "The outputs tensor info is empty";
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[0];
  }
  return tensorinfo_in.tensor_layout();
}

OperatorInfoPtr GetDistributeOperator(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsParallelCareNode(node)) {
    return nullptr;
  }
  OperatorInfoPtr distribute_operator = node->operator_info();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "GetDistributeOperator: distribute_operator is nullptr";
  }
  return distribute_operator;
}

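// Infer the redistribution operators that convert the layout produced by the
// middle (previous) node into the layout expected by input 'index' of the next
// node, then insert them in front of the next node.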
void Redistribution(const std::pair<AnfNodePtr, int> &node_pair, const OperatorInfoPtr &distribute_operator,
                    const CNodePtr &middle_node, int index, TensorRedistribution tensor_redistribution,
                    const CNodePtr &pre_node) {
  FuncGraphPtr func_graph = middle_node->func_graph();
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "Redistribution: get graph failed";
  }
  CNodePtr next_node = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(next_node);
  auto middle_value = middle_node->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(middle_value);
  PrimitivePtr middle_prim = middle_value->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(middle_prim);
  OperatorInfoPtr next_distribute_operator = GetDistributeOperator(next_node);
  if (next_distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << next_node->ToString() << " GetDistributeOperator failed";
  }
  RankList dev_list = distribute_operator->global_device_list();
  std::string next_prim_name = GetValueNode<PrimitivePtr>(next_node->input(0))->name();
  MS_LOG(DEBUG) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim " << next_prim_name;
  MS_LOG(DEBUG) << "Redistribution: middle_node " << middle_node->ToString() << " next_node " << next_node->ToString();
  // extract the tensor layouts in and out
  if (distribute_operator->outputs_tensor_info().empty()) {
    MS_LOG(EXCEPTION) << "Failure: pre_node's tensorinfo_in is empty";
  }
  if (IntToSize(index - 1) >= next_distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, the index is " << index - 1 << ", the vector size is "
                      << next_distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_out = next_distribute_operator->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  TensorLayout tensorlayout_in = GetTensorInLayout(middle_node, middle_prim, distribute_operator);
  if (tensor_redistribution.Init(tensorlayout_in, tensorlayout_out, dev_list) == FAILED) {
    MS_LOG(ERROR) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim : " << next_prim_name;
    MS_LOG(ERROR) << "Redistribution: middle_node " << middle_node->ToString() << " next_node "
                  << next_node->ToString();
    DumpGraph(func_graph, "redistribution_error");
    MS_LOG(EXCEPTION) << "Failure: tensor_redistribution init failed";
  }
  RedistributionOpListPtr redistribution_oplist_ptr = tensor_redistribution.InferTensorRedistributionOperatorList();
  if (redistribution_oplist_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: InferTensorRedistribution failed";
  }
  MS_LOG(DEBUG) << "Redistribution size " << redistribution_oplist_ptr->first.size();
  if (!redistribution_oplist_ptr->first.empty()) {
    // insert the nodes before the next node
    InsertRedistribution(redistribution_oplist_ptr, next_node, func_graph, node_pair.second, pre_node);
  }
}

bool StrategyFound(std::unordered_map<std::string, ValuePtr> attrs) {
  auto iter = attrs.find(STRATEGY);
  return !((iter == attrs.end()) || (iter->second->type_name() == NONE));
}

bool IsCommunicationOp(const PrimitivePtr &prim) {
  MS_EXCEPTION_IF_NULL(prim);
  return (COMMUNICATION_OPS.find(prim->name()) != COMMUNICATION_OPS.end());
}

bool FindCommunicationOp(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_value_node = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_value_node);
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_value_node);
    MS_EXCEPTION_IF_NULL(prim);
    if (IsCommunicationOp(prim) && cnode->in_forward_flag()) {
      MS_EXCEPTION_IF_NULL(prim_value_node->scope());
      MS_LOG(INFO) << "The graph contains a communication op: " << prim->name() << ", scope name is "
                   << prim_value_node->scope()->name();
      return true;
    }
  }
  return false;
}

bool IsParallelCareNode(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  ValueNodePtr prim_node = cnode->input(0)->cast<ValueNodePtr>();
  if (prim_node == nullptr) {
    return false;
  }
  PrimitivePtr prim = prim_node->value()->cast<PrimitivePtr>();
  if (prim == nullptr) {
    return false;
  }
  if (IsInBlackList(prim)) {
    MS_LOG(INFO) << "Parallel doesn't care about node: " << prim->name();
    return false;
  }
  // get_next is not in the forward graph, so we need to mark get_next as a forward node
  if (prim->name() == GET_NEXT) {
    return true;
  }
  if ((prim->name() == CAST) && (cnode->operator_info() == nullptr)) {
    return false;
  }
  return cnode->in_forward_flag();
}

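// Walk the users of 'node' recursively; for every user that is a parallel-care
// node with operator info, trigger Redistribution on that edge, otherwise keep
// searching through that user's own users.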
void StepRedistribution(const CNodePtr &node, const OperatorInfoPtr &distribute_operator, const CNodePtr &insert_node,
                        const TensorRedistribution &tensor_redistribution, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node->func_graph());
  FuncGraphManagerPtr manager = node->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  CNodePtr insert_node_new;
  if (IsValueNode<Primitive>(node->input(0))) {
    auto current_value = node->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(current_value);
    PrimitivePtr current_prim = current_value->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(current_prim);
    insert_node_new = ((current_prim->name() == TUPLE_GETITEM) ? node : insert_node);
  } else {
    insert_node_new = insert_node;
  }
  MS_EXCEPTION_IF_NULL(insert_node_new);
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode);
    if (!IsValueNode<Primitive>(use_cnode->input(0))) {
      StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
    } else {
      ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
      MS_EXCEPTION_IF_NULL(prim_anf_node);
      PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
      MS_EXCEPTION_IF_NULL(node_prim);
      if (node_prim->name() == DEPEND && node_pair.second != 1) {
        continue;
      }
      if (IsParallelCareNode(use_cnode) && (use_cnode->operator_info() != nullptr)) {
        Redistribution(node_pair, distribute_operator, insert_node_new, node_pair.second, tensor_redistribution,
                       pre_node);
      } else {
        StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
      }
    }
  }
}

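// Replace input 'index' of 'next_node' with a _GetTensorSlice of itself, so each
// device keeps only its slice as described by the operator's input tensor
// layout; scalar and [1] shapes are left untouched.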
void SplitTensor(const AnfNodePtr &node, const CNodePtr &next_node, int index) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(next_node);
  OperatorInfoPtr op_info = next_node->operator_info();
  MS_EXCEPTION_IF_NULL(op_info);
  // If the shape of the tensor is [] or [1], there is no need to split it.
  Shapes shapes = GetNodeShape(node);
  if (shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Split tensor for " << op_info->name()
                      << ": GetNodeShape for tensor_node, output size is not 1";
  }
  Shape shape = shapes[0];
  std::string shape_str = ShapeToString(shape);
  if (shape.empty() || ((shape.size() == 1) && (shape[0] == 1))) {
    MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape is " << shape_str
                 << ", no need to split it.";
    return;
  }
  MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape of the tensor is " << shape_str;
  // extract the tensor layout
  if (IntToSize(index - 1) >= op_info->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << index - 1 << ", vector size is "
                      << op_info->inputs_tensor_info().size();
  }
  TensorInfo tensor_info = op_info->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensor_layout = tensor_info.tensor_layout();
  // Use the _GetTensorSlice operator to split the tensor
  FuncGraphPtr func_graph = next_node->func_graph();  // only a cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(tensor_layout);
  InsertGetTensorSliceOp(op, next_node, func_graph, index, SPLIT_TENSOR);
  if (!op_info->sub_ops().empty()) {
    auto sub_ops = op_info->sub_ops();
    for (size_t i = 0; i < sub_ops.size(); i++) {
      if (!sub_ops.at(i).empty()) {
        InsertGetTensorSliceOp(sub_ops.at(i).at(0), next_node, func_graph, index, SUB);
      }
    }
  }
}

void StepSplitTensor(const AnfNodePtr &node, const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    if (use_cnode == nullptr || !IsValueNode<Primitive>(use_cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr use_cnode_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode_prim);
    if (use_cnode_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_cnode)) {
      SplitTensor(node, use_cnode, node_pair.second);
    }
  }
}

std::vector<AnfNodePtr> ReplaceOpInput(const Operator &replace_op, const std::string &instance_name,
                                       const CNodePtr &node) {
  OperatorArgs arg_replace_op = replace_op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_replace_op.first, replace_op.first, instance_name);
  if (pyop_instance == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << replace_op.first << " CreatOpInstance failed";
  }
  OperatorParams params = arg_replace_op.second;
  if (node->inputs().size() < 2) {
    // the GetNext operator does not have an input
    if (node->inputs().size() == 1) {
      return {NewValueNode(pyop_instance)};
    }
    MS_LOG(EXCEPTION) << "Failure: " << node->ToString() << " size is smaller than 2";
  }
  std::vector<AnfNodePtr> replace_input = {NewValueNode(pyop_instance), node->input(1)};
  if (!params.empty()) {
    Param param_first = *(params.begin());
    int32_t first_position = param_first.second;
    if (first_position == 1) {
      replace_input.pop_back();
    }
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      if (val == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: val is nullptr";
      }
      int32_t position = param.second;
      (void)replace_input.insert(replace_input.begin() + position, val);
    }
  }
  return replace_input;
}

void ReplaceOneOp(const Operator &replace_op, const CNodePtr &node) {
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  std::string instance_name = CreateInstanceName(node, 0);
  std::vector<AnfNodePtr> replace_input;
  replace_input = ReplaceOpInput(replace_op, instance_name, node);
  CNodePtr replace_node = func_graph->NewCNode(replace_input);
  MS_EXCEPTION_IF_NULL(replace_node);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  replace_node->set_scope(scope);
  replace_node->set_in_forward_flag(true);
  replace_input[0]->set_scope(scope);
  (void)manager->Replace(node, replace_node);
}

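// Replace 'node' with the operator chain 'replace_op' produced by its
// OperatorInfo; the last op of the chain reuses the original node's inputs
// (via ReplaceOpInput) and inherits its operator info.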
void StepReplaceOp(OperatorVector replace_op, const CNodePtr &node) {
  // step1: get the graph manager and distribute_operator
  OperatorInfoPtr distribute_operator = node->operator_info();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since distribute_operator is nullptr";
  }
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  // step2: traverse op_list and insert nodes
  std::reverse(replace_op.begin(), replace_op.end());
  auto replace_op_info = distribute_operator->replace_op_info();
  std::reverse(replace_op_info.begin(), replace_op_info.end());
  if (!replace_op_info.empty() && replace_op_info.size() != replace_op.size()) {
    MS_LOG(EXCEPTION) << "replace_op_info is not empty and its size is not equal to replace_op!";
  }
  bool replace_op_info_flag = !replace_op_info.empty();
  for (size_t index = 0; index < replace_op.size(); ++index) {
    std::string instance_name = CreateInstanceName(node, index);
    std::vector<AnfNodePtr> replace_input;
    if (index != replace_op.size() - 1) {
      replace_input = CreateInput(replace_op[index], node, instance_name);
    } else {
      replace_input = ReplaceOpInput(replace_op[index], instance_name, node);
    }
    CNodePtr replace_node = func_graph->NewCNode(replace_input);
    MS_EXCEPTION_IF_NULL(replace_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    replace_node->set_scope(scope);
    if (index == replace_op.size() - 1) {
      (void)replace_node->set_operator_info(node->operator_info());
    }
    replace_node->set_in_forward_flag(true);
    replace_input[0]->set_scope(scope);
    if (replace_op_info_flag && replace_op_info[index].first) {
      auto new_cnode = InsertMakeTuple(replace_node, replace_op_info[index].second, func_graph);
      (void)manager->Replace(node, new_cnode);  // use the Replace function to insert the node
    } else {
      (void)manager->Replace(node, replace_node);  // use the Replace function to insert the node
    }
  }
  MS_LOG(INFO) << "Insert ReplaceOp success for " << distribute_operator->name();
}

bool IsSomePrimitive(const CNodePtr &cnode, const std::string &name) {
  ValueNodePtr anf_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(anf_node);
  PrimitivePtr prim = anf_node->value()->cast<PrimitivePtr>();
  return (prim->name() == name);
}

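// Replace a OneHot CNode with a small replacement subgraph: rewire the two
// input nodes of the subgraph to the original first input, then substitute the
// subgraph's output node for the original node.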
void StepReplaceGraph(const std::shared_ptr<std::pair<std::vector<AnfNodePtr>, AnfNodePtr>> &replace_graph,
                      const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(replace_graph);
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(replace_graph->second);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  if (!IsSomePrimitive(node, ONEHOT)) {
    MS_LOG(EXCEPTION) << "Failure: Only the OneHot Primitive may enter StepReplaceGraph!";
  }
  if (node->inputs().size() != 5) {
    MS_LOG(EXCEPTION) << "Failure: There must be 5 inputs for the CNode corresponding to the OneHot Primitive!";
  }
  auto pre_node = node->input(1);
  if (replace_graph->first.size() != 2) {
    MS_LOG(EXCEPTION) << "Failure: replace_graph->first.size() must be 2 for the OneHot Primitive!";
  }
  for (auto &replace_input : replace_graph->first) {
    MS_EXCEPTION_IF_NULL(replace_input);
    manager->SetEdge(replace_input, 1, pre_node);
    CNodePtr replace_input_cnode = replace_input->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(replace_input_cnode);
    (void)replace_input_cnode->set_operator_info(node->operator_info());
    replace_input_cnode->set_in_forward_flag(true);  // mark this new cnode as a forward node
  }
  // "(void)manager->Replace(replace_graph->first, pre_node);" cannot be called
  auto replace_output = replace_graph->second;
  MS_EXCEPTION_IF_NULL(replace_output);
  (void)manager->Replace(node, replace_output);
  CNodePtr replace_output_cnode = replace_graph->second->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(replace_output_cnode);
  (void)replace_output_cnode->set_operator_info(node->operator_info());
  replace_output_cnode->set_in_forward_flag(true);  // mark this new cnode as a forward node
}

int32_t GetTupleGetItemIndex(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  if (cnode->inputs().size() != 3) {
    MS_LOG(EXCEPTION) << cnode->ToString() << " size( " << cnode->inputs().size() << " ) is not 3";
  }
  if (!cnode->input(2)->isa<ValueNode>()) {
    MS_LOG(EXCEPTION) << "The index of tuple getitem is not a value node";
  }
  ValuePtr tuple_index_value = GetValueNode(cnode->input(2));
  MS_EXCEPTION_IF_NULL(tuple_index_value);
  if (!tuple_index_value->isa<Int32Imm>()) {
    MS_LOG(EXCEPTION) << "The index of tuple getitem is not int32";
  }
  return tuple_index_value->cast<Int32ImmPtr>()->value();
}

// Judge whether the node is a loss node; if there are multiple outputs,
// determine which output the grad corresponds to according to the tuple getitem.
// Currently, a tuple sens is not supported.
LossNodeInfo GetLossNodeInfo(const AnfNodePtr &loss_node) {
  MS_EXCEPTION_IF_NULL(loss_node);
  FuncGraphPtr sub_graph = loss_node->func_graph();
  MS_EXCEPTION_IF_NULL(sub_graph);
  CNodePtr return_node = sub_graph->get_return();
  MS_EXCEPTION_IF_NULL(return_node);
  if (return_node->inputs().size() < 2) {
    MS_LOG(EXCEPTION) << "Failure: " << return_node->ToString() << " size is smaller than 2";
  }
  AnfNodePtr pre_node = return_node->input(1);
  MS_EXCEPTION_IF_NULL(pre_node);
  LossNodeInfo node_info;
  // return -> cast
  auto pre_cnode = pre_node->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(pre_cnode);
  auto pre_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  if (pre_prim->name() == CAST && pre_cnode->operator_info() == nullptr) {
    pre_node = pre_cnode->input(1);
  }
  // return -> loss
  if (pre_node == loss_node) {
    node_info.has_tuple_getitem = false;
    node_info.dout_index = 0;
    return node_info;
  }
  // return -> tuple_getitem -> loss
  auto cnode = pre_node->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  auto current_value = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(current_value);
  PrimitivePtr current_prim = current_value->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(current_prim);
  // the size of a common cnode is larger than 1
  if (cnode->inputs().size() < 2) {
    MS_LOG(EXCEPTION) << cnode->ToString() << " size( " << cnode->inputs().size() << " ) is smaller than 2";
  }
  if ((current_prim->name() == TUPLE_GETITEM) && (cnode->input(1) == loss_node)) {
    // the size of a tuple_getitem cnode is 3
    auto tuple_index = GetTupleGetItemIndex(cnode);
    node_info.has_tuple_getitem = true;
    node_info.dout_index = tuple_index;
    return node_info;
  }
  MS_LOG(EXCEPTION) << "Invalid loss";
}

void InsertVirtualDivOp(const VirtualDivOp &virtual_div_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  for (size_t index = 1; index < node_size; ++index) {
    AnfNodePtr input = node->input(index);
    MS_EXCEPTION_IF_NULL(input);
    if (!input->isa<CNode>() && !input->isa<Parameter>()) {  // if it is not a tensor, continue
      MS_LOG(INFO) << "insert div op: the input at index " << index << " is not a tensor, skip";
      continue;
    }
    for (size_t pos = 0; pos < virtual_div_op.size(); ++pos) {
      std::string instance_name = CreateInstanceName(node, pos);
      InsertNode(virtual_div_op[pos], node, index, node->input(index), func_graph, instance_name);
    }
    MS_LOG(INFO) << "insert div op for input index " << index << " of node";
  }
}

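// Trace backwards through the inputs of 'node' to the Parameter (or RefKey,
// flagged by the returned bool) feeding it; the search stops at parallel-care
// nodes, and for Depend nodes only the first input is followed.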
std::pair<AnfNodePtr, bool> FindParameter(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  if (!node->isa<Parameter>() && !node->isa<CNode>() && !node->isa<ValueNode>()) {
    return std::make_pair(nullptr, false);
  } else if (node->isa<Parameter>()) {
    return std::make_pair(node, false);
  } else if (node->isa<ValueNode>()) {
    if (IsValueNode<RefKey>(node)) {
      std::vector<AnfNodePtr> param_v = FindParameterByRefKeyNode(node, func_graph);
      if (param_v.size() != 1) {
        MS_LOG(EXCEPTION) << "FindParameterByRefKeyNode failed, the returned vector size must be 1, real is "
                          << param_v.size();
      }
      return std::make_pair(node, true);
    }
    return std::make_pair(nullptr, false);
  } else {
    CNodePtr cnode = node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      for (size_t index = 0; index < cnode->inputs().size(); ++index) {
        if (!FindParameter(cnode->input(index), func_graph).first) {
          continue;
        }
        return FindParameter(cnode->input(index), func_graph);
      }
    } else {
      if (IsParallelCareNode(cnode)) {
        return std::make_pair(nullptr, false);
      } else {
        ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
        MS_EXCEPTION_IF_NULL(prim_anf_node);
        for (size_t index = 0; index < cnode->inputs().size(); ++index) {
          PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
          MS_EXCEPTION_IF_NULL(prim);
          if (prim->name() == DEPEND && index != 1) {
            continue;
          }
          if (!FindParameter(cnode->input(index), func_graph).first) {
            continue;
          }
          return FindParameter(cnode->input(index), func_graph);
        }
      }
    }
  }
  return std::make_pair(nullptr, false);
}

std::pair<bool, CNodePtr> FindCNode(const AnfNodePtr &anode, const std::string &name, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(anode);
  MS_EXCEPTION_IF_NULL(anode->func_graph());
  FuncGraphManagerPtr manager = anode->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[anode];
  bool result = false;
  CNodePtr cnode_return = nullptr;
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if (node_prim->name() == name && node_pair.second == 1) {
      if (use_apply->func_graph() == func_graph) {
        result = true;
        cnode_return = use_apply;
        MS_LOG(INFO) << "Found Primitive " << name << " in the same func_graph";
        continue;
      }
      MS_LOG(INFO) << "Found Primitive " << name << " in a different func_graph";
    }
  }
  return std::make_pair(result, cnode_return);
}

bool IsCastBeforMirror(const CNodePtr &node, size_t index) {
  // return true only if cast_before_mirror is enabled, the pre node is a Cast, and its type is not float32
  if (!ParallelContext::GetInstance()->cast_before_mirror()) {
    return false;
  }
  auto pre_node = node->input(index);
  MS_EXCEPTION_IF_NULL(pre_node);
  auto cnode = pre_node->cast<CNodePtr>();
  if (cnode == nullptr || !IsValueNode<Primitive>(cnode->input(0))) {
    return false;
  }
  auto pre_value_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(pre_value_node);
  auto pre_prim = pre_value_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(pre_prim);
  if (pre_prim->name() != CAST) {
    return false;
  }
  auto node_type = pre_node->Type();
  MS_EXCEPTION_IF_NULL(node_type);
  if (!node_type->isa<mindspore::TensorType>()) {
    MS_LOG(EXCEPTION) << "Unknown type.";
  }
  auto input_element_type = node_type->cast<mindspore::TensorTypePtr>()->element();
  MS_EXCEPTION_IF_NULL(input_element_type);
  auto type_id = input_element_type->type_id();
  return (type_id != kNumberTypeFloat32);
}

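// Insert a mirror operator (the gradient all-reduce) on every input of 'node'
// that traces back to a parameter; reuse an existing MirrorOp CNode in the same
// graph when possible, and insert before the Cast when cast_before_mirror applies.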
void InsertMirrorOps(const MirrorOps &mirror_ops, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if (mirror_ops.size() != node_size - 1) {
    MS_LOG(EXCEPTION) << "Failure: The size of mirror_ops is wrong! mirror_ops size is " << mirror_ops.size()
                      << ", node_size is " << node_size;
  }
  for (size_t index = 1; index < node_size; ++index) {
    OperatorVector backward_op = mirror_ops[index - 1];
    if (backward_op.empty()) {
      continue;
    }
    std::pair<AnfNodePtr, bool> param_node_pair = FindParameter(node->input(index), func_graph);
    if (!param_node_pair.first) {
      continue;
    }
    // not a RefKey
    if (!param_node_pair.second) {
      auto next_cnode = FindCNode(param_node_pair.first, MIRROR_OPERATOR, func_graph);
      // if there is already a MirrorOp in the same graph, use the MirrorOp CNode as an input instead
      if (next_cnode.first) {
        MS_EXCEPTION_IF_NULL(next_cnode.second);
        manager->SetEdge(node, SizeToInt(index), next_cnode.second);
        continue;
      }
    }
    // if the parameter found is a RefKey, or no MirrorOp was found in the same graph, insert a new MirrorOp
    // there is only one MirrorOp in backward_op
    if (backward_op.size() != 1) {
      MS_LOG(EXCEPTION) << "backward_op size must be 1, real is " << backward_op.size();
    }
    std::string instance_name = MIRROR_OP;
    if (IsCastBeforMirror(node, index)) {
      for (auto &op : backward_op) {
        // insert new node before the node
        CNodePtr cnode = node->input(index)->cast<CNodePtr>();
        MS_EXCEPTION_IF_NULL(cnode);
        AnfNodePtr pre_node = cnode->input(1);
        InsertNode(op, cnode, size_t(1), pre_node, func_graph, instance_name);
      }
    } else {
      for (auto &op : backward_op) {
        AnfNodePtr pre_node = node->input(index);
        InsertNode(op, node, index, pre_node, func_graph, instance_name);
      }
    }
  }
}

void BackwardCommunication(const OperatorInfoPtr &distribute_operator, const CNodePtr &node,
                           const std::vector<std::pair<CNodePtr, CNodePtr>> &sens_loss_pairs) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(node);
  bool is_loss_cnode =
    std::any_of(sens_loss_pairs.begin(), sens_loss_pairs.end(),
                [node](const std::pair<CNodePtr, CNodePtr> &element) { return element.second == node; });
  MirrorOps mirror_ops = distribute_operator->mirror_ops();
  VirtualDivOp virtual_div_op = distribute_operator->virtual_div_op();
  // insert mirror op
  if (!mirror_ops.empty()) {
    MS_LOG(INFO) << "insert mirror op for " << distribute_operator->name();
    InsertMirrorOps(mirror_ops, node);
  }
  // insert virtual div op
  if (!virtual_div_op.empty() && is_loss_cnode) {
    MS_LOG(INFO) << "insert virtual div op for " << distribute_operator->name();
    InsertVirtualDivOp(virtual_div_op, node);
  }
}

std::string GetDisOpName(const std::string &prim_name) {
  std::string op_name = prim_name;
  if (!prim_name.empty() && (prim_name[0] == '_')) {
    op_name = prim_name.substr(1);
  }
  return op_name + "Info";
}

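// Create the OperatorInfo instance registered as '<OpName>Info' from the
// input/output shapes and attrs, and give it a unique name using the running
// TOTAL_OPS counter.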
OperatorInfoPtr OperatorInstanceByName(const std::string &name, const PrimitiveAttrs &attrs,
                                       const std::vector<Shapes> &shape_list) {
  if (shape_list.size() != 2) {
    MS_LOG(ERROR) << "The size of the shape list is not 2";
    return nullptr;
  }
  if (name.length() == 0) {
    MS_LOG(EXCEPTION) << "The length of name is zero!";
  }
  std::string distribute_opname = GetDisOpName(name);
  OperatorInfoPtr operator_ =
    (OperatorInfoPtr)DynCreator::Instance().Creat(distribute_opname, shape_list[0], shape_list[1], attrs, TOTAL_OPS);
  if (operator_ == nullptr) {
    MS_LOG(INFO) << "Create " << name << " failed";
    return nullptr;
  }
  std::string origin_name = operator_->name();
  operator_->set_name(origin_name + std::to_string(TOTAL_OPS));
  MS_LOG(INFO) << "Successfully created operator " << origin_name;
  ++TOTAL_OPS;
  return operator_;
}

OperatorInfoPtr OperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
                                 const std::vector<Shapes> &shape_list) {
  MS_EXCEPTION_IF_NULL(prim);
  OperatorInfoPtr operator_ = OperatorInstanceByName(prim->name(), attrs, shape_list);
  if (operator_ == nullptr) {
    MS_LOG(INFO) << "Create " << prim->name() << " failed, use batch parallel";
    operator_ = OperatorInstanceByName(BATCH_PARALLEL, attrs, shape_list);
    MS_EXCEPTION_IF_NULL(operator_);
  }
  return operator_;
}

OperatorInfoPtr NewOperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
                                    std::vector<Shapes> shape_list) {
  OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
  for (size_t i = 0; i < shape_list[0].size(); ++i) {
    MS_LOG(INFO) << "No: " << i << " input's shape: " << ShapeToString(shape_list[0][i]);
  }
  return operator_;
}

StrategyPtr ExtractStrategy(std::unordered_map<std::string, ValuePtr> attrs) {
  ValueTuplePtr var = attrs[STRATEGY]->cast<ValueTuplePtr>();
  StrategyPtr strategyPtr;
  MS_LOG(INFO) << "Extract information: strategy " << attrs[STRATEGY]->ToString();
  if (var == nullptr) {
    MS_LOG(EXCEPTION) << "Strategy value is nullptr";
  }
  if (var->size() > 0) {
    std::vector<ValuePtr> elements = var->value();
    std::vector<Dimensions> strategy;
    for (uint32_t index = 0; index < elements.size(); ++index) {
      Dimensions dim;
      if (elements[index]->isa<ValueSequeue>()) {
        ValueTuplePtr value_tuple = elements[index]->cast<ValueTuplePtr>();
        std::vector<ValuePtr> value_vector = value_tuple->value();
        (void)std::transform(value_vector.begin(), value_vector.end(), std::back_inserter(dim),
                             [](const ValuePtr &value) { return static_cast<int32_t>(GetValue<int>(value)); });
        strategy.push_back(dim);
      } else {
        MS_LOG(EXCEPTION) << "Failure: The strategy's format is wrong! A ValueSequeue is needed";
      }
    }
    if (strategy.empty()) {
      MS_LOG(EXCEPTION) << "ExtractStrategy: failed to extract strategy";
    }
    strategyPtr = NewStrategy(0, strategy);
  }
  return strategyPtr;
}

Shapes GetNodeShape(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  Shapes shapes;
  BaseShapePtr base_shape_ptr = node->Shape();
  if (node->isa<CNode>()) {
    auto cnode = node->cast<CNodePtr>();
    if (IsValueNode<Primitive>(cnode->input(0))) {
      PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
      MS_EXCEPTION_IF_NULL(prim);
      if (prim->name() == MAKEREF) {
        AnfNodePtr ref_node = cnode->input(1);
        auto func_graph = cnode->func_graph();
        MS_EXCEPTION_IF_NULL(ref_node);
        MS_EXCEPTION_IF_NULL(func_graph);
        return GetRefKeyNodeShape(ref_node, func_graph);
      }
    }
    if (cnode->input(0)->isa<CNode>()) {
      if (cnode->inputs().size() < 2) {
        MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " size is smaller than 2";
      }
      base_shape_ptr = cnode->input(1)->Shape();
    }
  }
  if (base_shape_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " shape_ptr is nullptr, full name is "
                      << node->fullname_with_scope();
  }
  auto tuple_shape_ptr = dyn_cast<abstract::TupleShape>(base_shape_ptr);
  if (tuple_shape_ptr != nullptr) {
    auto tuple_shape = tuple_shape_ptr->shape();
    for (auto &shape : tuple_shape) {
      auto each_shape = dyn_cast<abstract::Shape>(shape);
      MS_EXCEPTION_IF_NULL(each_shape);
      shapes.push_back(each_shape->shape());
    }
  } else {
    auto shape_ptr = dyn_cast<abstract::Shape>(base_shape_ptr);
    MS_EXCEPTION_IF_NULL(shape_ptr);
    shapes.push_back(shape_ptr->shape());
  }
  return shapes;
}

std::vector<AnfNodePtr> FindParameterByRefKeyNode(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> parameters;
  if (!IsValueNode<RefKey>(node)) {
    MS_LOG(ERROR) << "The node is not a ref key";
    return parameters;
  }
  auto ref_key = GetValueNode<RefKeyPtr>(node);
  MS_EXCEPTION_IF_NULL(ref_key);
  auto name = ref_key->tag();
  auto manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto roots = manager->roots();
  if (roots.size() != 1) {
    MS_LOG(ERROR) << "The size of roots ( " << roots.size() << " ) is not 1";
    return parameters;
  }
  FuncGraphPtr root_g = roots.back();
  MS_EXCEPTION_IF_NULL(root_g);
  for (auto &param_node : root_g->parameters()) {
    auto param = param_node->cast<ParameterPtr>();
    if (param && (name == param->name())) {
      parameters.push_back(param_node);
      MS_LOG(INFO) << "The name of the ref key is: " << name;
      return parameters;
    }
  }
  MS_LOG(ERROR) << "The name of the ref key is: " << name << ", but the parameter has not been found";
  return parameters;
}

Shapes GetRefKeyNodeShape(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(node, func_graph);
  if (parameters.size() != 1) {
    MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
  }
  Shapes input_shapes;
  input_shapes = GetNodeShape(parameters[0]);
  if (input_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Get input shape failed";
  }
  MS_LOG(INFO) << "The parameter shape is " << ShapeToString(input_shapes[0]);
  return input_shapes;
}

std::vector<Shapes> ExtractShape(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  Shapes shape_inputs, shape_outputs;
  std::vector<Shapes> shape_all;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  size_t inputs_size = all_inputs.size();
  for (size_t i = 1; i < inputs_size; ++i) {
    Shapes input_shapes;
    AnfNodePtr input = all_inputs[i];
    if (IsValueNode<RefKey>(input)) {
      auto func_graph = node->func_graph();
      MS_EXCEPTION_IF_NULL(func_graph);
      std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(input, func_graph);
      if (parameters.size() != 1) {
        MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
      }
      std::pair<AnfNodePtr, int> node_pair = std::make_pair(node, SizeToInt(i));
      g_RefMap[parameters[0]] = node_pair;
      input_shapes = GetRefKeyNodeShape(input, func_graph);
    } else if (IsValueNode<Tensor>(input) || input->isa<CNode>() || input->isa<Parameter>()) {
      input_shapes = GetNodeShape(input);
    } else {
      continue;
    }
    if (input_shapes.size() != 1) {
      MS_LOG(EXCEPTION) << "ExtractShape: get input shape failed";
    }
    shape_inputs.push_back(input_shapes[0]);
  }
  shape_all.push_back(shape_inputs);
  // extract output shape
  shape_outputs = GetNodeShape(node);
  shape_all.push_back(shape_outputs);
  return shape_all;
}
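// The returned vector always has two entries: shape_all[0] holds one shape per
// tensor-like input and shape_all[1] holds the output shape(s). This is the
// shape_list layout that OperatorInstance consumes in ExtractInformation below.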
std::pair<AnfNodePtr, int> FindParallelCareNode(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_node_anf = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_node_anf);
    PrimitivePtr node_prim = prim_node_anf->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if (node_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(cnode) && cnode->operator_info() != nullptr) {
      return node_pair;
    }
    // recurse through users that are not parallel care nodes themselves
    std::pair<AnfNodePtr, int> next = FindParallelCareNode(node_pair.first);
    if (next.first != nullptr) {
      return next;
    }
  }
  return std::make_pair(nullptr, 0);
}
std::pair<AnfNodePtr, int> FindSubGraph(const FuncGraphPtr &graph, const AnfNodePtr &parameter) {
  MS_EXCEPTION_IF_NULL(graph);
  MS_EXCEPTION_IF_NULL(parameter);
  FuncGraphManagerPtr manager = graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  std::pair<AnfNodePtr, int> prim_anf_node_pair = FindParallelCareNode(parameter);
  if (prim_anf_node_pair.first != nullptr) {
    return prim_anf_node_pair;
  }
  AnfNodeIndexSet param_sub_set = manager->node_users()[parameter];
  for (auto &param_pair : param_sub_set) {
    CNodePtr graph_cnode = param_pair.first->cast<CNodePtr>();
    if ((graph_cnode == nullptr) || !graph_cnode->input(0)->isa<CNode>()) {
      continue;
    }
    CNodePtr graph_cnode_inp0 = graph_cnode->input(0)->cast<CNodePtr>();
    if (!IsValueNode<FuncGraph>(graph_cnode_inp0->input(1))) {
      continue;
    }
    FuncGraphPtr graph_sub = GetValueNode<FuncGraphPtr>(graph_cnode_inp0->input(1));
    auto parameters = graph_sub->parameters();
    if (IntToSize(param_pair.second - 1) >= parameters.size()) {
      MS_LOG(EXCEPTION) << "The index is out of range, index is " << param_pair.second - 1 << ", vector size is "
                        << parameters.size();
    }
    std::pair<AnfNodePtr, int> res = FindSubGraph(graph_sub, parameters[IntToSize(param_pair.second - 1)]);
    if (res.first != nullptr) {
      return res;
    }
  }
  return std::make_pair(nullptr, 0);
}
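// Search order sketch: FindSubGraph first asks FindParallelCareNode to walk
// the parameter's direct users; failing that, it follows calls into
// sub-graphs, mapping the argument position (param_pair.second - 1, skipping
// the graph-value input) onto the callee's formal parameter, depth first.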
void SetParallelShape(const AnfNodePtr &parameter, const std::pair<AnfNodePtr, int> &res) {
  MS_EXCEPTION_IF_NULL(parameter);
  AbstractBasePtr abstract = parameter->abstract();
  MS_EXCEPTION_IF_NULL(abstract);
  MS_LOG(DEBUG) << "SetParallelShape " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
  CNodePtr cnode = res.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = cnode->operator_info();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: node " << cnode->ToString() << "'s OperatorInfoPtr is nullptr";
  }
  if (IntToSize(res.second - 1) >= distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << res.second - 1 << ", vector size is "
                      << distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[IntToSize(res.second - 1)];
  Shape slice_shape = tensorinfo_in.slice_shape();
  MS_LOG(DEBUG) << "SetParallelShape slice_shape " << parameter->ToString() << " shape "
                << MakeValue(slice_shape)->ToString();
  std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
  MS_EXCEPTION_IF_NULL(parallel_shape);
  // Don't modify it in-place, as the pointer of this AbstractValue may be used as a cache key in StaticAnalysis.
  auto cloned_abstract = abstract->Clone();
  MS_EXCEPTION_IF_NULL(cloned_abstract);
  cloned_abstract->set_shape(parallel_shape);
  parameter->set_abstract(cloned_abstract);
  TensorLayout tensor_layout = tensorinfo_in.tensor_layout();
  ParameterPtr parameter_ptr = parameter->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(parameter_ptr);
  parameter_ptr->set_tensor_layout(std::make_shared<TensorLayout>(tensor_layout));
}
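// Illustrative example: for a [64, 128] parameter whose consumer's layout
// splits dimension 0 over 8 devices, slice_shape is [8, 128]; the abstract is
// cloned with that slice shape, and the TensorLayout is attached so later
// passes (e.g. optimizer-state cloning below) can recover the full mapping.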
void CoverSliceShape(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto parameters = root->parameters();
  for (auto &parameter : parameters) {
    MS_EXCEPTION_IF_NULL(parameter->Shape());
    auto iter = g_RefMap.find(parameter);
    if (iter != g_RefMap.end()) {
      SetParallelShape(parameter, g_RefMap[parameter]);
      continue;
    }
    std::pair<AnfNodePtr, int> res = FindSubGraph(root, parameter);
    if (res.first == nullptr) {
      MS_LOG(INFO) << "Parameter " << parameter->ToString() << " doesn't need to set parallel shape";
    } else {
      SetParallelShape(parameter, res);
      MS_LOG(DEBUG) << "Parameter " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
    }
  }
  g_RefMap.clear();
}
bool ParameterIsCloned(const FuncGraphPtr &root, const AnfNodePtr &parameter_node) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(parameter_node);
  FuncGraphManagerPtr manager = root->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto cloned_parameter = parameter_node->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(cloned_parameter);
  // only parameters with a default value carry clone info
  if (!cloned_parameter->has_default()) {
    return false;
  }
  py::object clone_info = parse::python_adapter::GetPyObjAttr(cloned_parameter->default_param(), CLONE_INFO);
  bool cloned = py::cast<bool>(parse::python_adapter::GetPyObjAttr(clone_info, CLONED));
  if (!cloned) {
    return false;
  }
  MS_LOG(INFO) << "The parameter: " << cloned_parameter->name() << " is cloned";
  return true;
}
void SetClonedTensorShapeForOptimizer(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &cloned_parameter_node : root->parameters()) {
    MS_EXCEPTION_IF_NULL(cloned_parameter_node);
    auto cloned_parameter = cloned_parameter_node->cast<ParameterPtr>();
    MS_EXCEPTION_IF_NULL(cloned_parameter);
    if (!ParameterIsCloned(root, cloned_parameter_node)) {
      continue;
    }
    // get the cloned index
    py::object cloned_info = parse::python_adapter::GetPyObjAttr(cloned_parameter->default_param(), CLONE_INFO);
    int32_t cloned_index = py::cast<int32_t>(parse::python_adapter::GetPyObjAttr(cloned_info, CLONED_INDEX));
    // find the parameter this one was cloned from
    bool found_be_cloned_parameter = false;
    ParameterPtr cloned_from_parameter = nullptr;
    AnfNodePtr cloned_from_node = nullptr;
    for (auto &be_cloned_parameter_node : root->parameters()) {
      MS_EXCEPTION_IF_NULL(be_cloned_parameter_node);
      auto be_cloned_parameter = be_cloned_parameter_node->cast<ParameterPtr>();
      MS_EXCEPTION_IF_NULL(be_cloned_parameter);
      if (!be_cloned_parameter->has_default()) {
        continue;
      }
      py::object be_cloned_info = parse::python_adapter::GetPyObjAttr(be_cloned_parameter->default_param(), CLONE_INFO);
      if (!py::cast<bool>(parse::python_adapter::GetPyObjAttr(be_cloned_info, BE_CLONED))) {
        continue;
      }
      // match the cloned index against the source parameter's cloned indices
      py::list be_cloned_index = parse::python_adapter::GetPyObjAttr(be_cloned_info, BE_CLONED_INDEX);
      for (auto &index : be_cloned_index) {
        if (cloned_index == py::cast<int32_t>(index)) {
          found_be_cloned_parameter = true;
          cloned_from_parameter = be_cloned_parameter;
          cloned_from_node = be_cloned_parameter_node;
          break;
        }
      }
    }
    if (found_be_cloned_parameter) {
      // set the shape and tensor layout for the cloned parameter
      cloned_parameter->set_tensor_layout(cloned_from_parameter->tensor_layout());
      MS_EXCEPTION_IF_NULL(cloned_parameter_node->abstract());
      MS_EXCEPTION_IF_NULL(cloned_from_node->abstract());
      auto cloned_abstract = cloned_parameter_node->abstract()->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(cloned_from_node->abstract()->GetShapeTrack());
      cloned_parameter_node->set_abstract(cloned_abstract);
      MS_LOG(INFO) << "The parameter: " << cloned_parameter->name()
                   << " is cloned, the source parameter is: " << cloned_from_parameter->name()
                   << ", clone index is: " << cloned_index;
    } else {
      MS_LOG(EXCEPTION) << "The parameter: " << cloned_parameter->name() << " is cloned, cloned index is "
                        << cloned_index << ", but the source parameter is not found";
    }
  }
}
void SetVirtualDatasetStrategy(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(node->input(0));
  MS_EXCEPTION_IF_NULL(prim);
  if (prim->name() == VIRTUAL_DATA_SET) {
    CheckGlobalDeviceManager();
    int32_t dev_num = SizeToInt(g_device_manager->GetDeviceListByStageId(0).size());
    auto attrs_temp = prim->attrs();
    std::vector<Shapes> shape_list = ExtractShape(node);
    if (shape_list.empty()) {
      MS_LOG(EXCEPTION) << "Failure: node " << node->ToString() << " failed to extract shape";
    }
    std::vector<ValuePtr> elements;
    for (size_t i = 0; i < shape_list[0].size(); i++) {
      if (shape_list[0][i].empty()) {
        MS_LOG(EXCEPTION) << "shape_list[0][" << i << "].size() is zero";
      }
      // split the first (batch) dimension across all devices, keep the rest unsplit
      std::vector<int32_t> input_strategy = {dev_num};
      for (size_t j = 1; j < shape_list[0][i].size(); j++) {
        input_strategy.push_back(1);
      }
      elements.push_back(MakeValue(input_strategy));
    }
    ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
    attrs_temp[STRATEGY] = strategy;
    (void)prim->SetAttrs(attrs_temp);
  }
}
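// For example (illustrative): with 8 devices and a dataset output of shape
// [32, 3, 224, 224], the generated strategy for that input is (8, 1, 1, 1),
// i.e. pure data parallelism over the batch dimension.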
void ExtractInformation(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    SetVirtualDatasetStrategy(cnode);
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    auto attrs = prim->attrs();
    MS_LOG(INFO) << "extract information: node: " << node->ToString() << " prim " << prim->name();
    if (IsParallelCareNode(cnode)) {
      std::vector<Shapes> shape_list = ExtractShape(cnode);
      if (shape_list.empty()) {
        MS_LOG(EXCEPTION) << "Failure: node " << node->ToString() << " failed to extract shape";
      }
      OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
      if (operator_ == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: Primitive " << prim->name() << " OperatorInstance failed";
      }
      auto &inputs = cnode->inputs();
      std::vector<ValuePtr> input_value;
      for (size_t index = 1; index < inputs.size(); ++index) {
        if (inputs[index]->isa<ValueNode>()) {
          input_value.push_back(GetValueNode(inputs[index]));
        } else {
          input_value.emplace_back(nullptr);
        }
      }
      StrategyPtr strategyPtr = nullptr;
      operator_->set_input_value(input_value);
      operator_->set_outputs_dtype(cnode->Type());
      operator_->set_cnode(cnode);
      if (prim->name() == RESHAPE) {
        (void)cnode->set_operator_info(operator_);
        continue;
      }
      if (!StrategyFound(attrs)) {
        MS_LOG(INFO) << "ExtractInformation: the strategy of node " << node->ToString() << " prim " << prim->name()
                     << " is empty, using batch parallel";
        std::shared_ptr<std::vector<Dimensions>> strategy_v_ptr = operator_->GenerateBatchStrategies();
        if (strategy_v_ptr == nullptr) {
          MS_LOG(EXCEPTION) << "Failure: generate batch parallel strategy failed";
        }
        std::vector<ValuePtr> elements;
        for (size_t i = 0; i < strategy_v_ptr->size(); i++) {
          elements.push_back(MakeValue((*strategy_v_ptr)[i]));
        }
        ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
        // expose the strategy generated by batch parallel via the GEN_STRATEGY attr
        attrs[GEN_STRATEGY] = strategy;
        (void)prim->SetAttrs(attrs);
        MS_LOG(INFO) << "node " << node->ToString() << " prim " << prim->name() << " batch parallel strategy is "
                     << attrs[GEN_STRATEGY]->ToString();
        strategyPtr = NewStrategy(0, *strategy_v_ptr);
      } else {
        strategyPtr = ExtractStrategy(attrs);
      }
      if (strategyPtr == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: strategyPtr is nullptr";
      }
      if (operator_->Init(strategyPtr) == FAILED) {
        MS_LOG(EXCEPTION) << "Failure: operator " << prim->name() << " init failed";
      }
      (void)cnode->set_operator_info(operator_);
    }
  }
}
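// A user-provided strategy arrives as the STRATEGY attr of the primitive, set
// from Python, e.g. (illustrative) MatMul().set_strategy(((2, 4), (4, 1))) on
// 8 devices. When the attr is absent, GenerateBatchStrategies() falls back to
// splitting only the batch dimension, matching SetVirtualDatasetStrategy above.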
TensorLayout GetInputLayoutFromCNode(const std::pair<AnfNodePtr, int> &node_pair) {
  CNodePtr cnode = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  MS_EXCEPTION_IF_NULL(distribute_operator);
  int index = node_pair.second;
  if (index > SizeToInt(distribute_operator->inputs_tensor_info().size())) {
    MS_LOG(EXCEPTION) << "The index is out of range, the node_pair.second is " << index << ", the vector size is "
                      << distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensorlayout_in = tensorinfo_in.tensor_layout();
  return tensorlayout_in;
}
// if reshape's output connects to several primitives, return the first layout found
std::shared_ptr<TensorLayout> FindNextLayout(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  MS_EXCEPTION_IF_NULL(cnode->func_graph());
  FuncGraphManagerPtr manager = cnode->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[cnode];
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    MS_LOG(INFO) << "FindNextLayout prim " << node_prim->name();
    if (node_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_apply) && (use_apply->operator_info() != nullptr)) {
      MS_LOG(INFO) << "FindNextLayout success prim " << node_prim->name();
      auto layout = GetInputLayoutFromCNode(node_pair);
      return std::make_shared<TensorLayout>(layout);
    }
    MS_LOG(DEBUG) << "FindNextLayout failed prim " << node_prim->name() << " " << IsParallelCareNode(use_apply)
                  << " " << (use_apply->operator_info() != nullptr);
    auto layout_ptr = FindNextLayout(use_apply);
    if (layout_ptr) {
      return layout_ptr;
    }
  }
  MS_LOG(WARNING) << "FindNextLayout returned nullptr; if reshape is not the last primitive, there must be some error";
  return nullptr;
}
std::shared_ptr<TensorLayout> GetOutputLayoutFromCNode(const CNodePtr &cnode, size_t output_index) {
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  MS_EXCEPTION_IF_NULL(distribute_operator);
  if (distribute_operator->outputs_tensor_info().size() <= output_index) {
    MS_LOG(EXCEPTION) << "outputs_tensor_info size is " << distribute_operator->outputs_tensor_info().size()
                      << ", must be larger than output_index " << output_index;
  }
  TensorInfo tensorinfo_out = distribute_operator->outputs_tensor_info()[output_index];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  return std::make_shared<TensorLayout>(tensorlayout_out);
}
std::shared_ptr<TensorLayout> FindPrevParallelCareNodeLayout(const AnfNodePtr &node, size_t output_index) {
  if (!node->isa<CNode>()) {
    return nullptr;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return nullptr;
  }
  if (IsParallelCareNode(cnode) && (cnode->operator_info() != nullptr)) {
    auto layout_ptr = GetOutputLayoutFromCNode(cnode, output_index);
    if (!layout_ptr) {
      MS_LOG(EXCEPTION) << "Failure: GetOutputLayoutFromCNode failed";
    }
    return layout_ptr;
  }
  return nullptr;
}
std::shared_ptr<TensorLayout> CreateParameterLayout(const AnfNodePtr &node) {
  // Create a DataParallel tensor layout for the parameter (supports WideDeep).
  CheckGlobalDeviceManager();
  int32_t dev_num = SizeToInt(g_device_manager->GetDeviceListByStageId(0).size());
  TensorLayout input_tensor_layout;
  // create input_shape
  Shapes inputs_shape = GetNodeShape(node);
  Shape input_shape_array = inputs_shape[0];
  if (input_shape_array.empty()) {
    MS_LOG(EXCEPTION) << "Don't support reshaping a scalar parameter.";
  }
  // create tensor_map: split the first dimension, replicate the rest
  size_t shape_size = input_shape_array.size();
  TensorMap input_tensor_map_array(SizeToInt(shape_size) - 1, -1);
  input_tensor_map_array.insert(input_tensor_map_array.begin(), 0);
  // create dev_matrix
  Shape dev_matrix_array = {dev_num};
  if (input_tensor_layout.InitFromVector(dev_matrix_array, input_tensor_map_array, input_shape_array) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Create tensor layout for parameter failed.";
  }
  return std::make_shared<TensorLayout>(input_tensor_layout);
}
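// Illustrative example: for a [100, 16] parameter on 4 devices, the device
// matrix is {4} and the tensor map is {0, -1}: dimension 0 is sharded across
// the 4 devices and dimension 1 is replicated, giving [25, 16] slices.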
std::shared_ptr<TensorLayout> FindPrevLayout(const AnfNodePtr &node) {
  if (node->isa<Parameter>()) {
    return CreateParameterLayout(node);
  }
  if (!node->isa<CNode>()) {
    return nullptr;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return nullptr;
  }
  if (IsParallelCareNode(cnode) && (cnode->operator_info() != nullptr)) {
    auto layout_ptr = GetOutputLayoutFromCNode(cnode, 0);
    if (!layout_ptr) {
      MS_LOG(EXCEPTION) << "Failure: GetOutputLayoutFromCNode failed";
    }
    return layout_ptr;
  }
  ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  if (prim->name() == TUPLE_GETITEM) {
    auto tuple_index = GetTupleGetItemIndex(cnode);
    auto layout_ptr = FindPrevParallelCareNodeLayout(cnode->input(1), IntToSize(tuple_index));
    if (!layout_ptr) {
      MS_LOG(EXCEPTION)
        << " Failure: FindPrevLayout failed, tuple_getitem before reshape, but there does not exist a parallel care "
           "node before tuple_getitem!";
    }
    return layout_ptr;
  }
  for (size_t index = 0; index < cnode->inputs().size(); ++index) {
    if (prim->name() == DEPEND && index != 1) {
      continue;
    }
    auto layout_ptr = FindPrevLayout(cnode->inputs()[index]);
    if (!layout_ptr) {
      continue;
    }
    return layout_ptr;
  }
  MS_LOG(WARNING) << "FindPrevLayout returned nullptr; if reshape is not the first primitive, there must be some error";
  return nullptr;
}
void ReshapeInit(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    if (!IsParallelCareNode(cnode) || (cnode->operator_info() == nullptr)) {
      continue;
    }
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->operator_info();
    if (operator_info == nullptr) {
      MS_LOG(EXCEPTION) << "Failure: Primitive " << prim->ToString() << " OperatorInstance is nullptr";
    }
    if (prim->name() != RESHAPE) {
      continue;
    }
    auto attrs = prim->attrs();
    if (StrategyFound(attrs)) {
      MS_LOG(EXCEPTION) << "Setting a strategy for Reshape has no effect!";
    }
    MS_ASSERT(cnode->inputs().size() == 3);
    // Reshape's layouts are inferred from its neighbours instead of a strategy:
    // the input layout comes from the producer, the output layout from the consumer.
    auto prev_layout_ptr = FindPrevLayout(cnode->input(1));
    if (prev_layout_ptr) {
      auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
      reshape_info_ptr->SetInputLayout(*prev_layout_ptr);
    }
    auto next_layout_ptr = FindNextLayout(cnode);
    if (next_layout_ptr) {
      auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
      reshape_info_ptr->SetOutputLayout(*next_layout_ptr);
    }
    if (operator_info->Init(nullptr) == FAILED) {
      MS_LOG(EXCEPTION) << "Failure: operator " << prim->ToString() << " init failed";
    }
  }
}
CNodePtr FindLossCNode(const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(func_graph);
  CNodePtr return_node = func_graph->get_return();
  MS_EXCEPTION_IF_NULL(return_node);
  if (return_node->size() < 2) {
    MS_LOG(EXCEPTION) << "Failure: " << return_node->ToString() << " size is smaller than 2";
  }
  AnfNodePtr pre_node = return_node->input(1);
  MS_EXCEPTION_IF_NULL(pre_node);
  auto pre_cnode = pre_node->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(pre_cnode);
  auto current_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  // return -> cast
  if (current_prim->name() == CAST && pre_cnode->operator_info() == nullptr) {
    pre_cnode = pre_cnode->input(1)->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(pre_cnode);
    current_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  }
  // notice: the GetNext op has no input
  if (INVALID_LOSS_OPS.find(current_prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(INFO) << "The loss is: " << current_prim->name();
    return pre_cnode;
  }
  // the size of a common cnode is larger than 1
  if (pre_cnode->size() < 2) {
    MS_LOG(EXCEPTION) << pre_cnode->ToString() << " size( " << pre_cnode->inputs().size() << " ) is smaller than 2";
  }
  // return -> tuple_getitem -> loss
  if (current_prim->name() == TUPLE_GETITEM) {
    AnfNodePtr pre_pre_node = pre_cnode->input(1);
    MS_EXCEPTION_IF_NULL(pre_pre_node);
    auto pre_pre_cnode = pre_pre_node->cast<CNodePtr>();
    auto value = pre_pre_cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(value);
    PrimitivePtr prim = value->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(prim);
    MS_LOG(DEBUG) << "The loss name is " << prim->name();
    return pre_pre_cnode;
  }
  // return -> make_tuple
  if (current_prim->name() == MAKE_TUPLE) {
    MS_LOG(EXCEPTION) << "The loss contains make_tuple, which is not supported";
  }
  // return -> loss
  MS_LOG(DEBUG) << "The loss name is " << current_prim->name();
  return pre_cnode;
}
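// The recognized return patterns are therefore:
//   return -> loss
//   return -> cast -> loss            (a cast typically inserted by mixed precision)
//   return -> tuple_getitem -> loss   (a loss with multiple outputs)
// A graph whose return feeds from make_tuple is rejected.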
TensorLayouts GetLossNodeGradOutputLayout(const CNodePtr &loss_cnode) {
  TensorLayouts ret;
  MS_EXCEPTION_IF_NULL(loss_cnode);
  AnfNodePtr node = loss_cnode->cast<AnfNodePtr>();
  MS_EXCEPTION_IF_NULL(node);
  LossNodeInfo node_info = GetLossNodeInfo(node);
  ValueNodePtr prim_anf_node = loss_cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(prim_anf_node);
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(prim);
  if (INVALID_LOSS_OPS.find(prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(WARNING) << "The loss name is: " << prim->name() << ", do nothing for splitting sens now";
    return ret;
  }
  OperatorInfoPtr operator_info = loss_cnode->operator_info();
  MS_EXCEPTION_IF_NULL(operator_info);
  TensorInfo loss_grad_tensor_info;
  size_t op_output_size = operator_info->outputs_tensor_info().size();
  MS_LOG(INFO) << "The loss name is " << operator_info->name() << ", has_tuple_getitem is "
               << node_info.has_tuple_getitem << ", the output size is " << op_output_size << ", the dout_index is "
               << node_info.dout_index;
  if ((op_output_size == 0) || (op_output_size <= IntToSize(node_info.dout_index))) {
    MS_LOG(EXCEPTION) << "The index is " << node_info.dout_index << ", but the size of outputs is " << op_output_size;
  }
  if (!node_info.has_tuple_getitem && (op_output_size > 1)) {
    MS_LOG(EXCEPTION) << "Currently, it is not supported that the sens is a tuple.";
  }
  loss_grad_tensor_info = operator_info->outputs_tensor_info()[IntToSize(node_info.dout_index)];
  ret.push_back(loss_grad_tensor_info.tensor_layout());
  return ret;
}
void SplitSens(const CNodePtr &grad_sens_node, const TensorLayout &loss_grad_layout) {
  MS_EXCEPTION_IF_NULL(grad_sens_node);
  if (grad_sens_node->size() <= 1) {
    MS_LOG(EXCEPTION) << "The size of grad sens node is smaller than 2";
  }
  AnfNodePtr sens_tensor_node = grad_sens_node->input(1);
  MS_EXCEPTION_IF_NULL(sens_tensor_node);
  Shapes sens_shapes = GetNodeShape(sens_tensor_node);
  if (sens_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "GetNodeShape for sens_tensor_node: output size is not 1";
  }
  // If the shape of the sens tensor is [] or [1], there is no need to split it.
  Shape sens_shape = sens_shapes[0];
  if (sens_shape.empty() || ((sens_shape.size() == 1) && (sens_shape[0] == 1))) {
    if (sens_tensor_node->isa<Parameter>()) {
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      sens_tensor_param->set_tensor_layout(std::make_shared<TensorLayout>(loss_grad_layout));
    }
    MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", no need to split sens";
    return;
  }
  auto loss_shape = loss_grad_layout.tensor_shape().array();
  if (loss_shape != sens_shape) {
    MS_LOG(EXCEPTION) << "The shape of sens is not equal to the loss output, which is unsupported now. Sens shape is "
                      << ShapeToString(sens_shape) << ", loss shape is " << ShapeToString(loss_shape);
  }
  MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", split it.";
  if (!IsValueNode<Tensor>(sens_tensor_node)) {
    if (sens_tensor_node->isa<Parameter>()) {
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      AbstractBasePtr abstract = sens_tensor_node->abstract();
      MS_EXCEPTION_IF_NULL(abstract);
      auto slice_shape = loss_grad_layout.slice_shape().array();
      std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
      MS_EXCEPTION_IF_NULL(parallel_shape);
      auto cloned_abstract = abstract->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(parallel_shape);
      sens_tensor_node->set_abstract(cloned_abstract);
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      sens_tensor_param->set_tensor_layout(std::make_shared<TensorLayout>(loss_grad_layout));
      return;
    }
    MS_LOG(EXCEPTION) << "The type of the sens node is neither Tensor nor Parameter, which is unsupported now.";
  }
  // Use the _GetTensorSlice operator to split the sens tensor
  FuncGraphPtr func_graph = grad_sens_node->func_graph();  // only a cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(loss_grad_layout);
  InsertGetTensorSliceOp(op, grad_sens_node, func_graph, 1, SPLIT_SENS);
}
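// Sketch of the three cases handled above (assuming a sharded loss output):
//   sens is [] or [1]        -> keep it; just attach the loss layout if it is a Parameter;
//   sens is a Parameter      -> shrink its abstract to the slice shape and attach the layout;
//   sens is a Tensor value   -> insert a _GetTensorSlice op so each rank reads only its slice.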
void InsertForwardOps(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorVector forward_op = distribute_operator->forward_op();
  if (!forward_op.empty()) {
    MS_LOG(INFO) << "Insert forward op for " << distribute_operator->name();
    ForwardCommunication(forward_op, cnode);
  }
}
void StepReplace(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  // StepReplaceOp
  OperatorVector replace_op = distribute_operator->replace_op();
  if (!replace_op.empty()) {
    MS_LOG(INFO) << "StepReplaceOp " << cnode->ToString();
    StepReplaceOp(replace_op, cnode);
  }
  // StepReplaceGraph: after calling StepReplaceGraph, cnode can not be used anymore.
  ReplaceGraphPtr replace_graph = distribute_operator->replace_graph(cnode);
  if (!replace_op.empty() && replace_graph) {
    MS_LOG(EXCEPTION) << "Only one of replace_op or replace_graph can be used";
  }
  if (replace_graph) {
    MS_LOG(INFO) << "StepReplaceGraph " << cnode->ToString();
    StepReplaceGraph(replace_graph, cnode);
  }
}
void HandleDropoutNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  std::string op_name = distribute_operator->name();
  if (op_name.find(DROPOUT_DO_MASK) == std::string::npos) {
    return;
  }
  DropoutDoMaskInfoPtr dropout_do_mask = std::dynamic_pointer_cast<DropoutDoMaskInfo>(distribute_operator);
  MS_EXCEPTION_IF_NULL(dropout_do_mask);
  Operator replace_op = dropout_do_mask->GetDropoutGenMaskReplaceOp(cnode);
  if (cnode->inputs().size() != DROPOUT_DO_MASK_CNODE_INPUT_SIZE) {
    MS_LOG(EXCEPTION) << "The size of the DropoutDoMask cnode's inputs is not " << DROPOUT_DO_MASK_CNODE_INPUT_SIZE;
  }
  ReplaceOneOp(replace_op, cnode->input(DROPOUT_GEN_MASK_INDEX)->cast<CNodePtr>());
}
void HandleSpecialNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  HandleDropoutNode(distribute_operator, cnode);
}
std::set<FuncGraphPtr> FindForwardGraphByRootNodes(const AnfNodeSet &root_all_nodes) {
  // J->CNode->Graph
  std::set<FuncGraphPtr> graph_set;
  for (auto &node : root_all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if ((cnode->size() < 2) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    auto expect_j_prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    if (expect_j_prim->name() != J) {
      continue;
    }
    if (IsValueNode<FuncGraph>(cnode->input(1))) {
      auto graph = GetValueNode<FuncGraphPtr>(cnode->input(1));
      MS_LOG(DEBUG) << "Find the forward graph success";
      graph_set.insert(graph);
    }
  }
  return graph_set;
}
void StepSplitSens(const std::pair<CNodePtr, CNodePtr> &sens_loss_pair) {
  CNodePtr sens_node = sens_loss_pair.first;
  CNodePtr loss_node = sens_loss_pair.second;
  auto loss_grad_layout = GetLossNodeGradOutputLayout(loss_node);
  if (!loss_grad_layout.empty()) {
    SplitSens(sens_node, loss_grad_layout[0]);
  }
}
std::vector<CNodePtr> FindLossCNodeFromRoot(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  AnfNodePtr root_return_node = root->get_return();
  MS_EXCEPTION_IF_NULL(root_return_node);
  std::vector<CNodePtr> loss_node;
  const auto &all_nodes = root->nodes();
  std::set<FuncGraphPtr> graph_set = FindForwardGraphByRootNodes(all_nodes);
  if (graph_set.empty()) {
    loss_node.push_back(FindLossCNode(root));
  }
  (void)std::transform(graph_set.begin(), graph_set.end(), std::back_inserter(loss_node),
                       [](const FuncGraphPtr &graph) { return FindLossCNode(graph); });
  return loss_node;
}
// Sens node satisfies the following conditions: cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
std::vector<std::pair<CNodePtr, CNodePtr>> GetSensLossPairs(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  std::vector<std::pair<CNodePtr, CNodePtr>> sens_loss_pairs;
  for (auto &node : root->nodes()) {
    if (!node->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)
    auto sens_cnode = node->cast<CNodePtr>();
    AnfNodePtr expect_tuple_getitem = sens_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_tuple_getitem);
    if (!expect_tuple_getitem->isa<CNode>()) {
      continue;
    }
    auto expect_tuple_getitem_cnode = expect_tuple_getitem->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_tuple_getitem_cnode, TUPLE_GETITEM)) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode
    AnfNodePtr expect_anonymous = expect_tuple_getitem_cnode->input(1);
    MS_EXCEPTION_IF_NULL(expect_anonymous);
    if (!expect_anonymous->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
    auto expect_anonymous_cnode = expect_anonymous->cast<CNodePtr>();
    AnfNodePtr expect_j = expect_anonymous_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_j);
    if (!expect_j->isa<CNode>()) {
      continue;
    }
    auto expect_j_cnode = expect_j->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_j_cnode, J)) {
      continue;
    }
    if (!IsValueNode<FuncGraph>(expect_j_cnode->input(1))) {
      MS_LOG(EXCEPTION) << "Sens can't find the corresponding graph.";
    }
    auto func_graph = GetValueNode<FuncGraphPtr>(expect_j_cnode->input(1));
    auto loss_cnode = FindLossCNode(func_graph);
    std::pair<CNodePtr, CNodePtr> sens_loss_pair = std::make_pair(sens_cnode, loss_cnode);
    sens_loss_pairs.push_back(sens_loss_pair);
  }
  return sens_loss_pairs;
}
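// The matched chain corresponds to gradients computed through J: the sens
// cnode applies a tuple_getitem taken from the closure returned by J(network),
// so input(1) of the J cnode recovers the forward graph, and FindLossCNode
// then locates its loss.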
void ParallelCommunication(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes,
                           const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(manager);
  TensorRedistribution tensor_redistribution;
  std::vector<std::pair<CNodePtr, CNodePtr>> sens_loss_pairs = GetSensLossPairs(root);
  bool has_backward = !sens_loss_pairs.empty();
  // splitting sens must happen before inserting the operators.
  for (auto &pair : sens_loss_pairs) {
    // If the shape of the grad-sens tensor is not [] or [1], use get tensor slice to handle it.
    // If the type of the sens node is not Tensor, it is unsupported now; nothing is done by default.
    StepSplitSens(pair);
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      if (!IsValueNode<Primitive>(cnode->input(0))) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      if (distribute_operator == nullptr) {
        continue;
      }
      // insert forward ops
      InsertForwardOps(distribute_operator, cnode);
      // insert redistribution ops
      StepRedistribution(cnode, distribute_operator, cnode, tensor_redistribution, cnode);
      // insert backward ops
      if (has_backward) {
        BackwardCommunication(distribute_operator, cnode, sens_loss_pairs);
      }
      // StepReplace
      StepReplace(distribute_operator, cnode);
      HandleSpecialNode(distribute_operator, cnode);
    } else if (IsValueNode<Tensor>(node)) {
      StepSplitTensor(node, manager);
    }
  }
}
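// Per parallel-care cnode, the loop above applies, in order: forward
// communication (e.g. an AllReduce for an operator whose reduced axis is
// sharded), tensor redistribution between mismatched producer/consumer
// layouts, backward (mirror) communication when a backward pass exists, and
// finally any op/graph replacement plus special-case handling (dropout).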
namespace {
void RevertSymbolicKeyInstance(const FuncGraphPtr &root, const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(node);
  auto symbolic_key = GetValueNode<SymbolicKeyInstancePtr>(node);
  MS_EXCEPTION_IF_NULL(symbolic_key);
  auto all_upstream_node = root->manager()->node_users()[node];
  for (auto &upstream_node : all_upstream_node) {
    FuncGraphPtr fg = upstream_node.first->func_graph();
    if (symbolic_key->node()->isa<Parameter>()) {
      for (auto &param : root->parameters()) {
        if (*param == *symbolic_key->node()) {
          AnfNodePtr reverted_node = root->NewCNode({NewValueNode(prim::kPrimEmbed), param});
          MS_EXCEPTION_IF_NULL(reverted_node);
          MS_LOG(DEBUG) << "before replace " << node->ToString() << " to node " << reverted_node->DebugString();
          (void)fg->manager()->Replace(node, reverted_node);
          MS_LOG(DEBUG) << "revert node " << node->ToString() << " to node " << reverted_node->DebugString();
        }
      }
    }
  }
}
}  // namespace
void HandleSymbolicKeyInstance(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &node : all_nodes) {
    // revert SymbolicKeyInstance back to the embed() primitive
    if (IsValueNode<SymbolicKeyInstance>(node)) {
      RevertSymbolicKeyInstance(root, node);
    }
  }
}
void CheckpointStrategy(const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(func_graph);
  MS_LOG(INFO) << "Save strategy to checkpoint begin";
  StrategyMap straMap;
  auto ret = func_graph->get_return();
  auto all_nodes = DeepScopedGraphSearch(ret);
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->operator_info();
    if (operator_info) {
      if (prim->instance_name().empty()) {
        continue;
      }
      std::string instance_name = prim->instance_name();
      StrategyPtr strategyPtr = operator_info->strategy();
      MS_EXCEPTION_IF_NULL(node->scope());
      std::string node_name = node->scope()->name() + std::string(CONNSYMBOL) + instance_name;
      straMap[node_name] = strategyPtr;
    }
  }
  if (StrategyCheckpoint::GetInstance().Save(straMap) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Save strategy checkpoint failed";
  }
}
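// CheckpointStrategy pairs with RestoreStrategy (below) for multi-train runs:
// strategies are keyed by "<scope name><CONNSYMBOL><instance name>", saved
// once per training round, and re-attached to matching operator_info on load.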
void RestoreStrategy(const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(func_graph);
  MS_LOG(INFO) << "Extract strategy from checkpoint begin";
  StrategyMap straMap;
  if (StrategyCheckpoint::GetInstance().Load(&straMap) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Load strategy checkpoint failed";
  }
  if (StrategyCheckpoint::GetInstance().RemoveCheckPoint() != SUCCESS) {
    MS_LOG(EXCEPTION) << "Remove strategy checkpoint failed";
  }
  auto ret = func_graph->get_return();
  auto all_nodes = DeepScopedGraphSearch(ret);
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->operator_info();
    if (operator_info) {
      if (prim->instance_name().empty()) {
        continue;
      }
      std::string instance_name = prim->instance_name();
      MS_EXCEPTION_IF_NULL(node->scope());
      std::string node_name = node->scope()->name() + std::string(CONNSYMBOL) + instance_name;
      MS_LOG(INFO) << "Node name is " << node_name;
      if (straMap.find(node_name) != straMap.end()) {
        StrategyPtr strategyPtr = straMap[node_name];
        operator_info->set_strategy(strategyPtr);
      }
    }
  }
}
void SetForwardFlag(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    MS_LOG(DEBUG) << "Set forward flag " << cnode->DebugString() << ".";
    cnode->set_in_forward_flag(true);
  }
}
void SetForwardFlag(const AnfNodeSet &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    cnode->set_in_forward_flag(true);
  }
}
std::set<FuncGraphPtr> ForwardGraph(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  const auto &all_nodes = root->nodes();
  std::set<FuncGraphPtr> graph_set = FindForwardGraphByRootNodes(all_nodes);
  return graph_set;
}
std::vector<AnfNodePtr> FindRootForwardCNode(const FuncGraphPtr &graph, const AnfNodeSet &all_nodes) {
  MS_EXCEPTION_IF_NULL(graph);
  auto loss_cnode = FindLossCNode(graph);
  MS_EXCEPTION_IF_NULL(loss_cnode);
  auto loss_cnode_id = loss_cnode->UniqueIdThroughCopy();
  std::vector<AnfNodePtr> root_forward_nodes;
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    auto root_node_id = node->UniqueIdThroughCopy();
    if (loss_cnode_id == root_node_id) {
      root_forward_nodes = DeepLinkedGraphSearch(cnode);
      break;
    }
  }
  return root_forward_nodes;
}
void MarkForwardCNode(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto all_nodes = root->nodes();
  std::set<FuncGraphPtr> graph_set = FindForwardGraphByRootNodes(all_nodes);
  if (graph_set.empty()) {
    MS_LOG(INFO) << "Cannot find the forward graph, so mark the ops in the root graph";
    SetForwardFlag(all_nodes);
  } else {
    for (auto &func_graph : graph_set) {
      MS_LOG(INFO) << "The sub graph size of root is " << root->func_graphs_used().size();
      auto return_node = func_graph->get_return();
      MS_EXCEPTION_IF_NULL(return_node);
      auto all_dfs_nodes = DeepLinkedGraphSearch(return_node);
      SetForwardFlag(all_dfs_nodes);
      auto root_forward_nodes = FindRootForwardCNode(func_graph, all_nodes);
      if (root_forward_nodes.empty()) {
        continue;
      }
      // Mark the forward flag for the nodes in the root graph.
      SetForwardFlag(root_forward_nodes);
    }
  }
}
Status ParallelInit() {
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  int32_t device_num = ParallelContext::GetInstance()->device_num();
  int32_t global_rank = ParallelContext::GetInstance()->global_rank();
  std::string backend = ParallelContext::GetInstance()->communication_backend();
  std::string world_group;
  if (backend == HCCL_BACKEND) {
    world_group = HCCL_WORLD_GROUP;
  } else if (backend == NCCL_BACKEND) {
    world_group = NCCL_WORLD_GROUP;
  } else {
    MS_LOG(EXCEPTION) << "Invalid communication backend: " << backend;
  }
  uint32_t world_rank_size = 0;
  if (!ParallelContext::GetInstance()->device_num_is_set()) {
    if (!CommManager::GetInstance().GetRankSize(world_group, &world_rank_size)) {
      MS_LOG(EXCEPTION) << "Get rank size failed";
    }
    device_num = UintToInt(world_rank_size);
    MS_LOG(INFO) << "Get device num from the communication framework, the device num is " << device_num;
  }
  uint32_t rank_id = 0;
  if (!ParallelContext::GetInstance()->global_rank_is_set()) {
    if (!CommManager::GetInstance().GetRankID(world_group, &rank_id)) {
      MS_LOG(EXCEPTION) << "Get rank id failed";
    }
    global_rank = UintToInt(rank_id);
    MS_LOG(INFO) << "Get global rank from the communication framework, the global rank is " << global_rank;
  }
  if (!InitDevice(device_num, global_rank, backend)) {
    MS_LOG(ERROR) << "Init device failed";
    return FAILED;
  }
  MS_LOG(INFO) << "The parallel context: dev num: " << device_num << ", global rank: " << global_rank
               << ", backend: " << backend << ", mirror_mean: " << ParallelContext::GetInstance()->mirror_mean()
               << ", cast_before_mirror: " << ParallelContext::GetInstance()->cast_before_mirror();
  return SUCCESS;
}
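// Precedence sketch: values already set on ParallelContext win (e.g. via the
// Python API context.set_auto_parallel_context(device_num=8, global_rank=0),
// shown here for illustration); otherwise both values are queried from the
// HCCL/NCCL world group at runtime.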
bool StepParallel(const FuncGraphPtr &root, const opt::OptimizerPtr &optimizer) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(optimizer);
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  std::string parallel_mode = ParallelContext::GetInstance()->parallel_mode();
  // assume no change to the graph
  bool changes = false;
  // control whether to use model_parallel mode
  if (((parallel_mode != AUTO_PARALLEL) && (parallel_mode != SEMI_AUTO_PARALLEL)) ||
      (root->has_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY))) {
    return changes;
  }
  struct timeval start_time, end_time;
  (void)gettimeofday(&start_time, nullptr);
  MS_LOG(INFO) << "Now entering step parallel";
  DumpGraph(root, std::string(STEP_PARALLEL_BEGIN));
  pipeline::ResourceBasePtr res = optimizer->resource();
  MS_EXCEPTION_IF_NULL(res);
  FuncGraphManagerPtr manager = res->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);
  std::reverse(all_nodes.begin(), all_nodes.end());
  if (parallel_mode != AUTO_PARALLEL) {
    TOTAL_OPS = 0;
    if (ParallelInit() != SUCCESS) {
      MS_LOG(EXCEPTION) << "Parallel init failed";
    }
    // mark the forward cnodes; parallel only cares about these nodes
    MarkForwardCNode(root);
    if (FindCommunicationOp(all_nodes)) {
      MS_LOG(EXCEPTION) << "The graph contains a communication op";
    }
    // extract shape and strategy, set operator_info
    ExtractInformation(all_nodes);
    ReshapeInit(all_nodes);
    // extract strategy from checkpoint for multi-train
    if (StrategyCheckpoint::GetInstance().CheckPointOn() && StrategyCheckpoint::GetInstance().CheckPointExit()) {
      RestoreStrategy(root);
    }
  }
  // save strategy as checkpoint for multi-train
  if (StrategyCheckpoint::GetInstance().CheckPointOn() &&
      StrategyCheckpoint::GetInstance().GetCurrentTrainTime() < StrategyCheckpoint::GetInstance().GetTrainTimes()) {
    CheckpointStrategy(root);
  }
  HandleSymbolicKeyInstance(root, all_nodes);
  // cover the parallel shape
  CoverSliceShape(root);
  // set the shape for the optimizer's cloned tensors
  SetClonedTensorShapeForOptimizer(root);
  // ForwardCommunication, BackwardCommunication, TensorRedistribution
  ParallelCommunication(root, all_nodes, manager);
  DumpGraph(root, std::string(STEP_PARALLEL_END));
  // step parallel only runs once
  root->flags()[SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY] = true;
  res->results()[pipeline::kStepParallelGraph] = root;
  (void)gettimeofday(&end_time, nullptr);
  uint64_t time = kUSecondInSecond * static_cast<uint64_t>(end_time.tv_sec - start_time.tv_sec);
  time += static_cast<uint64_t>(end_time.tv_usec - start_time.tv_usec);
  MS_LOG(INFO) << "Now leaving step parallel, used time: " << time << " us";
  return changes;
}
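// End-to-end order of the pass, for reference: ParallelInit ->
// MarkForwardCNode -> ExtractInformation -> ReshapeInit -> restore/save
// strategy checkpoints -> HandleSymbolicKeyInstance -> CoverSliceShape ->
// SetClonedTensorShapeForOptimizer -> ParallelCommunication -> dump and flag
// the graph so the pass runs only once.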
// Needed by rec_parser
std::vector<std::string> ExtractInputsTensorName(const CNodePtr &node) {
  std::vector<std::string> name_inputs;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  std::vector<AnfNodePtr> node_inputs{all_inputs.begin() + 1, all_inputs.end()};
  std::string node_id = node->UniqueId();
  name_inputs.push_back(node_id);
  for (auto &input : node_inputs) {
    std::string name = input->UniqueId();
    name_inputs.push_back(name);
  }
  return name_inputs;
}
}  // namespace parallel
}  // namespace mindspore