
step_parallel.cc (117 kB)

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "frontend/parallel/step_parallel.h"

#include <inttypes.h>
#include <sys/time.h>
#include <algorithm>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <unordered_map>
#include <utility>

#include "ir/tensor.h"
#include "ir/param_info.h"
#include "frontend/operator/ops.h"
#include "frontend/optimizer/optimizer.h"
#include "frontend/parallel/auto_parallel/graph_costmodel.h"
#include "frontend/parallel/context.h"
#include "frontend/parallel/device_manager.h"
#include "frontend/parallel/dynamic_creator.h"
#include "frontend/parallel/graph_util/generate_graph.h"
#include "frontend/parallel/graph_util/graph_info.h"
#include "frontend/parallel/graph_util/node_info.h"
#include "frontend/parallel/node_check.h"
#include "frontend/parallel/ops_info/matmul_info.h"
#include "frontend/parallel/strategy_checkpoint/parallel_strategy_checkpoint.h"
#include "utils/comm_manager.h"
#include "utils/symbolic.h"
#include "utils/ms_context.h"

using mindspore::tensor::Tensor;

namespace mindspore {
namespace parallel {
static const std::set<std::string> COMMUNICATION_OPS = {ALL_REDUCE, ALL_GATHER, ALL_TO_ALL, REDUCE_SCATTER};
static const std::set<std::string> INVALID_LOSS_OPS = {GET_NEXT, VIRTUALLOSS};

// g_RefMap: if input i of CNode B is a RefKey[Parameter C], the map holds one entry
// with key C and value (B, i).
static std::map<AnfNodePtr, std::pair<AnfNodePtr, int>> g_RefMap;
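
// Illustrative sketch (editorial assumption, not part of the upstream source): for a
// hypothetical graph where CNode %B takes RefKey[Parameter C] as its first input,
//   %B = SomeOp(RefKey[C], %x)
// the map would hold g_RefMap[C] = {%B, 1}, so the RefKey can later be resolved back
// to the (user node, input index) pair.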

void SetCommunicationOpGroupLabel(std::vector<AnfNodePtr> new_node_input) {
  if (new_node_input.empty()) {
    return;
  }
  ValueNodePtr prim_anf_node = new_node_input[0]->cast<ValueNodePtr>();
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
  MS_EXCEPTION_IF_NULL(prim);
  auto attrs = prim->attrs();
  auto iter = attrs.find(GROUP);
  if (iter != attrs.end()) {
    auto value = iter->second;
    MS_EXCEPTION_IF_NULL(value);
    if (value->isa<StringImm>()) {
      std::string hash_name = value->cast<StringImmPtr>()->value();
      MS_EXCEPTION_IF_NULL(g_device_manager);
      std::string rank_list_name = g_device_manager->FindRankListNameByHashName(hash_name);
      (void)prim->AddAttr(GROUP_RANKS, MakeValue(rank_list_name));
    }
  }
}

std::vector<AnfNodePtr> CreateInput(const Operator &op, const AnfNodePtr &node, const std::string &instance_name) {
  MS_EXCEPTION_IF_NULL(node);
  OperatorArgs arg_forward = op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_forward.first, op.first, instance_name);
  MS_EXCEPTION_IF_NULL(pyop_instance);
  OperatorParams params = arg_forward.second;
  std::vector<AnfNodePtr> new_node_input = {NewValueNode(pyop_instance), node};
  if (!params.empty()) {
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      MS_EXCEPTION_IF_NULL(val);
      int32_t position = param.second;
      (void)new_node_input.insert(new_node_input.begin() + position, val);
    }
  }
  // if the op has a 'group' attr, set the rank list name for the op
  SetCommunicationOpGroupLabel(new_node_input);
  return new_node_input;
}
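
// Illustrative sketch (editorial assumption, not part of the upstream source): for an
// Operator carrying one extra parameter bound to position 2, CreateInput turns
//   {ValueNode(prim), node}  into  {ValueNode(prim), node, ValueNode(param)}
// i.e. each parameter's ValueNode is spliced in at its declared position.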

void InsertNode(const Operator &op, const CNodePtr &node, size_t index, const AnfNodePtr &pre_node,
                const FuncGraphPtr &func_graph, const std::string &instance_name) {
  // insert new node before the node
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  std::vector<AnfNodePtr> node_input = CreateInput(op, pre_node, instance_name);
  CNodePtr new_node = func_graph->NewCNode(node_input);
  MS_EXCEPTION_IF_NULL(new_node);
  if (instance_name.find(SPLIT_SENS) == std::string::npos) {
    new_node->set_in_forward_flag(true);  // mark forward flag
  }
  auto new_node_value = node_input[0]->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(new_node_value);
  PrimitivePtr new_node_prim = new_node_value->value()->cast<PrimitivePtr>();
  new_node_prim->set_instance_name(instance_name);
  new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
  new_node->set_scope(scope);
  node_input[0]->set_scope(scope);
  manager->SetEdge(node, SizeToInt(index), new_node);
  MS_LOG(INFO) << "Insert " << instance_name << " success";
}

std::string CreateInstanceName(const CNodePtr &node, size_t index) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsValueNode<Primitive>(node->input(0))) {
    MS_LOG(EXCEPTION) << "CreateInstanceName: " << node->ToString() << " doesn't have primitive";
  }
  std::string name_base = node->fullname_with_scope();
  std::string name = name_base + "_" + std::to_string(index);
  std::string instance_name = HashInstanceName(name);
  return instance_name;
}
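
// Illustrative sketch (editorial assumption, not part of the upstream source): for a node
// whose fullname_with_scope() is "Default/network/MatMul-op0" and index 0, the string fed
// to HashInstanceName would be "Default/network/MatMul-op0_0"; hashing keeps the instance
// name compact while staying unique per (node, index).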

void ForwardCommunication(OperatorVector forward_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  // step 1: get the graph manager and distribute_operator
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto uses_set = manager->node_users()[node];
  CNodePtr node_to_insert = node;
  for (auto &uses_pair : uses_set) {
    auto uses_cnode = uses_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(uses_cnode);
    if (!IsValueNode<Primitive>(uses_cnode->input(0))) {
      break;
    }
    PrimitivePtr value_node_prim = GetValueNode<PrimitivePtr>(uses_cnode->input(0));
    MS_EXCEPTION_IF_NULL(value_node_prim);
    if (value_node_prim->name() == TUPLE_GETITEM) {
      if (uses_set.size() > 1) {
        MS_LOG(EXCEPTION) << "Now only support one output, but got " << uses_set.size();
      }
      node_to_insert = uses_cnode;
    }
  }
  MS_EXCEPTION_IF_NULL(node_to_insert);
  std::reverse(forward_op.begin(), forward_op.end());
  // step 2: traverse op_list and insert nodes
  for (size_t index = 0; index < forward_op.size(); ++index) {
    std::string instance_name_base = FORWARD_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(node, index);
    std::vector<AnfNodePtr> forward_input = CreateInput(forward_op[index], node_to_insert, instance_name);
    CNodePtr forward_node = func_graph->NewCNode(forward_input);  // using NewCNode to create the ANF node
    MS_EXCEPTION_IF_NULL(forward_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    forward_node->set_scope(scope);
    forward_node->set_in_forward_flag(true);
    forward_input[0]->set_scope(scope);
    (void)manager->Replace(node_to_insert, forward_node);  // using Replace function to insert node
  }
}

CNodePtr InsertMakeTuple(const AnfNodePtr &prev, uint32_t num, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(prev);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> make_tuple_inputs;
  make_tuple_inputs.push_back(NewValueNode(prim::kPrimMakeTuple));
  for (uint32_t i = 0; i < num; i++) {
    std::vector<AnfNodePtr> tuple_get_item_inputs{NewValueNode(prim::kPrimTupleGetItem), prev,
                                                  CreatInt32Imm(UintToInt(i))};
    auto tuple_get_item = func_graph->NewCNode(tuple_get_item_inputs);
    MS_EXCEPTION_IF_NULL(tuple_get_item);
    make_tuple_inputs.push_back(tuple_get_item);
  }
  auto make_tuple = func_graph->NewCNode(make_tuple_inputs);
  MS_EXCEPTION_IF_NULL(make_tuple);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  (void)manager->Replace(prev, make_tuple);
  return make_tuple;
}

void InsertRedistribution(const RedistributionOpListPtr &redistribution_oplist_ptr, const CNodePtr &node,
                          const FuncGraphPtr &func_graph, int pos, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(pre_node);
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if ((redistribution_oplist_ptr->first).size() != (redistribution_oplist_ptr->second).size()) {
    MS_LOG(EXCEPTION) << "size of OperatorVector and OutPutInfoVector must be the same!";
  }
  for (size_t index = 0; index < (redistribution_oplist_ptr->first).size(); ++index) {
    if (pos >= SizeToInt(node->inputs().size())) {
      MS_LOG(EXCEPTION) << "InsertRedistribution: pos can't be larger than node's inputs' size";
    }
    // Create the new node
    AnfNodePtr target_node = node->input(IntToSize(pos));
    MS_EXCEPTION_IF_NULL(target_node);
    // Create instance_name
    auto op = (redistribution_oplist_ptr->first)[index];
    std::string op_name = (redistribution_oplist_ptr->first)[index].first;
    std::string instance_name_base = REDISTRIBUTION_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(pre_node, index) + op_name;
    InsertNode(op, node, IntToSize(pos), target_node, func_graph, instance_name);
    if ((redistribution_oplist_ptr->second)[index].first) {
      target_node = node->input(IntToSize(pos));
      MS_EXCEPTION_IF_NULL(target_node);
      (void)InsertMakeTuple(target_node, (redistribution_oplist_ptr->second)[index].second, func_graph);
    }
  }
}

void InsertGetTensorSliceOp(const Operator &op, const CNodePtr &node, const FuncGraphPtr &func_graph, int pos,
                            const std::string &instance_name) {
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: the graph is null, the instance name is " << instance_name;
  }
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if (pos >= SizeToInt(node->inputs().size())) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: pos can't be larger than node's inputs' size, the instance name is "
                      << instance_name;
  }
  // Create the new node
  AnfNodePtr pre_node = node->input(IntToSize(pos));
  MS_EXCEPTION_IF_NULL(pre_node);
  InsertNode(op, node, IntToSize(pos), pre_node, func_graph, instance_name);
}

TensorLayout GetTensorInLayout(const CNodePtr &middle_node, const PrimitivePtr &middle_prim,
                               const OperatorInfoPtr &distribute_operator) {
  TensorInfo tensorinfo_in;
  if (middle_prim->name() == TUPLE_GETITEM) {
    auto value_node = middle_node->input(2)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(value_node);
    size_t index_s = IntToSize(GetValue<int>(value_node->value()));
    if (index_s >= distribute_operator->outputs_tensor_info().size()) {
      MS_LOG(EXCEPTION) << "The index is out of range, index: " << index_s
                        << ", vector size: " << distribute_operator->outputs_tensor_info().size();
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[index_s];
  } else {
    if (distribute_operator->outputs_tensor_info().empty()) {
      MS_LOG(EXCEPTION) << "The outputs tensor info is empty";
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[0];
  }
  return tensorinfo_in.tensor_layout();
}

bool AnfNodeIsPrimitive(const AnfNodePtr &anf_node, const std::string &prim_name) {
  MS_EXCEPTION_IF_NULL(anf_node);
  auto cnode = anf_node->cast<CNodePtr>();
  if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
    return false;
  }
  auto value_node = cnode->input(0)->cast<ValueNodePtr>();
  auto prim = GetValueNode<PrimitivePtr>(value_node);
  MS_EXCEPTION_IF_NULL(prim);
  if (prim->name() == prim_name) {
    return true;
  }
  return false;
}

std::string GetPrimName(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsValueNode<Primitive>(node->input(0))) {
    MS_LOG(EXCEPTION) << "The node is not a primitive";
  }
  auto value_node = node->input(0)->cast<ValueNodePtr>();
  auto prim = GetValueNode<PrimitivePtr>(value_node);
  MS_EXCEPTION_IF_NULL(prim);
  return prim->name();
}

OperatorInfoPtr GetDistributeOperator(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsParallelCareNode(node)) {
    return nullptr;
  }
  OperatorInfoPtr distribute_operator = node->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Distribute operator is nullptr, the prim is " << GetPrimName(node);
  }
  return distribute_operator;
}

void Redistribution(const std::pair<AnfNodePtr, int> &node_pair, const OperatorInfoPtr &distribute_operator,
                    const CNodePtr &middle_node, int index, TensorRedistribution tensor_redistribution,
                    const CNodePtr &pre_node) {
  FuncGraphPtr func_graph = middle_node->func_graph();
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "Redistribution: get graph failed";
  }
  CNodePtr next_node = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(next_node);
  auto middle_value = middle_node->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(middle_value);
  PrimitivePtr middle_prim = middle_value->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(middle_prim);
  OperatorInfoPtr next_distribute_operator = GetDistributeOperator(next_node);
  if (next_distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << next_node->ToString() << " GetDistributeOperator failed";
  }
  RankList dev_list = distribute_operator->global_device_list();
  std::string next_prim_name = GetValueNode<PrimitivePtr>(next_node->input(0))->name();
  MS_LOG(DEBUG) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim " << next_prim_name;
  MS_LOG(DEBUG) << "Redistribution: middle_node " << middle_node->ToString() << " next_node " << next_node->ToString();
  // extract tensor layout in and out
  if (distribute_operator->outputs_tensor_info().empty()) {
    MS_LOG(WARNING) << "pre_node's tensorinfo_in is empty, operator name is " << distribute_operator->name();
    return;
  }
  if (IntToSize(index - 1) >= next_distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(WARNING) << "The index is out of range, the index is " << index - 1 << ", the vector size is "
                    << next_distribute_operator->inputs_tensor_info().size() << ", next operator name is "
                    << next_distribute_operator->name();
    return;
  }
  TensorInfo tensorinfo_out = next_distribute_operator->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  TensorLayout tensorlayout_in = GetTensorInLayout(middle_node, middle_prim, distribute_operator);
  if (tensor_redistribution.Init(tensorlayout_in, tensorlayout_out, dev_list) == FAILED) {
    MS_LOG(ERROR) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim : " << next_prim_name;
    MS_LOG(ERROR) << "Redistribution: middle_node " << middle_node->ToString() << " next_node "
                  << next_node->ToString();
    DumpGraph(func_graph, "redistribution_error");
    MS_LOG(EXCEPTION) << "Failure: tensor_redistribution init failed";
  }
  RedistributionOpListPtr redistribution_oplist_ptr = tensor_redistribution.InferTensorRedistributionOperatorList();
  if (redistribution_oplist_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: InferTensorRedistribution failed";
  }
  MS_LOG(DEBUG) << "Redistribution size " << redistribution_oplist_ptr->first.size();
  if (!redistribution_oplist_ptr->first.empty()) {
    // insert node before next node
    InsertRedistribution(redistribution_oplist_ptr, next_node, func_graph, node_pair.second, pre_node);
  }
}

bool StrategyFound(std::unordered_map<std::string, ValuePtr> attrs) {
  auto iter = attrs.find(STRATEGY);
  return !((iter == attrs.end()) || (iter->second->type_name() == NONE));
}

bool HasStrategy(const FuncGraphPtr &root) {
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    auto attrs = prim->attrs();
    if (StrategyFound(attrs)) {
      return true;
    }
  }
  return false;
}

bool IsCommunicationOp(const PrimitivePtr &prim) {
  MS_EXCEPTION_IF_NULL(prim);
  return (COMMUNICATION_OPS.find(prim->name()) != COMMUNICATION_OPS.end());
}

bool FindCommunicationOp(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_value_node = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_value_node);
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_value_node);
    MS_EXCEPTION_IF_NULL(prim);
    if (IsCommunicationOp(prim) && cnode->in_forward_flag()) {
      MS_EXCEPTION_IF_NULL(prim_value_node->scope());
      MS_LOG(INFO) << "The graph contains communication op: " << prim->name() << ", scope name is "
                   << prim_value_node->scope()->name();
      return true;
    }
  }
  return false;
}

bool IsParallelCareNode(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  ValueNodePtr prim_node = cnode->input(0)->cast<ValueNodePtr>();
  if (prim_node == nullptr) {
    return false;
  }
  PrimitivePtr prim = prim_node->value()->cast<PrimitivePtr>();
  if (prim == nullptr) {
    return false;
  }
  if (IsInBlackList(prim)) {
    MS_LOG(INFO) << "Parallel doesn't care node: " << prim->name();
    return false;
  }
  // get_next is not in the forward graph, so we need to mark get_next as a forward node
  if (prim->name() == GET_NEXT) {
    return true;
  }
  if ((prim->name() == CAST) && !cnode->has_user_data<OperatorInfo>()) {
    return false;
  }
  return cnode->in_forward_flag();
}

void StepRedistribution(const CNodePtr &node, const OperatorInfoPtr &distribute_operator, const CNodePtr &insert_node,
                        const TensorRedistribution &tensor_redistribution, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node->func_graph());
  FuncGraphManagerPtr manager = node->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  CNodePtr insert_node_new;
  if (AnfNodeIsPrimitive(node, MAKE_TUPLE) || AnfNodeIsPrimitive(node, MAKE_LIST)) {
    MS_LOG(INFO) << "No need to insert redistribution op between make_tuple node and the next node";
    return;
  }
  if (IsValueNode<Primitive>(node->input(0))) {
    auto current_value = node->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(current_value);
    PrimitivePtr current_prim = current_value->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(current_prim);
    insert_node_new = ((current_prim->name() == TUPLE_GETITEM) ? node : insert_node);
  } else {
    insert_node_new = insert_node;
  }
  MS_EXCEPTION_IF_NULL(insert_node_new);
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode);
    if (!IsValueNode<Primitive>(use_cnode->input(0))) {
      StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
    } else {
      ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
      MS_EXCEPTION_IF_NULL(prim_anf_node);
      PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
      MS_EXCEPTION_IF_NULL(node_prim);
      if (node_prim->name() == DEPEND && node_pair.second != 1) {
        continue;
      }
      if (IsParallelCareNode(use_cnode) && use_cnode->has_user_data<OperatorInfo>()) {
        Redistribution(node_pair, distribute_operator, insert_node_new, node_pair.second, tensor_redistribution,
                       pre_node);
      } else {
        StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
      }
    }
  }
}

void SplitTensor(const AnfNodePtr &node, const CNodePtr &next_node, int index) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(next_node);
  OperatorInfoPtr op_info = next_node->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(op_info);
  // If the shape of the tensor is [] or [1], there is no need to split it.
  Shapes shapes = GetNodeShape(node);
  if (shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Split tensor for " << op_info->name()
                      << ": GetNodeShape for tensor_node, output size is not 1";
  }
  Shape shape = shapes[0];
  std::string shape_str = ShapeToString(shape);
  if (shape.empty() || ((shape.size() == 1) && (shape[0] == 1))) {
    MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape is " << shape_str
                 << ", no need to split it.";
    return;
  }
  MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape of tensor is " << shape_str;
  // extract tensor layout
  if (IntToSize(index - 1) >= op_info->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << index - 1 << ", vector size is "
                      << op_info->inputs_tensor_info().size();
  }
  TensorInfo tensor_info = op_info->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensor_layout = tensor_info.tensor_layout();
  // Use the _GetTensorSlice operator to split the tensor
  FuncGraphPtr func_graph = next_node->func_graph();  // only a cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(tensor_layout);
  InsertGetTensorSliceOp(op, next_node, func_graph, index, SPLIT_TENSOR);
  if (!op_info->sub_ops().empty()) {
    auto sub_ops = op_info->sub_ops();
    for (size_t i = 0; i < sub_ops.size(); i++) {
      if (!sub_ops.at(i).empty()) {
        InsertGetTensorSliceOp(sub_ops.at(i).at(0), next_node, func_graph, index, SUB);
      }
    }
  }
}

void StepSplitTensor(const AnfNodePtr &node, const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    if (use_cnode == nullptr || !IsValueNode<Primitive>(use_cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr use_cnode_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode_prim);
    if (use_cnode_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_cnode)) {
      SplitTensor(node, use_cnode, node_pair.second);
    }
  }
}

std::vector<AnfNodePtr> ReplaceOpInput(const Operator &replace_op, const std::string &instance_name,
                                       const CNodePtr &node) {
  OperatorArgs arg_replace_op = replace_op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_replace_op.first, replace_op.first, instance_name);
  if (pyop_instance == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << replace_op.first << " CreatOpInstance failed";
  }
  OperatorParams params = arg_replace_op.second;
  if (node->inputs().size() < 2) {
    // the GetNext operator does not have inputs
    if (node->inputs().size() == 1) {
      return {NewValueNode(pyop_instance)};
    }
    MS_LOG(EXCEPTION) << "Failure: " << node->ToString() << " size is smaller than 2";
  }
  std::vector<AnfNodePtr> replace_input = {NewValueNode(pyop_instance), node->input(1)};
  if (replace_op.first == EMBEDDING_LOOKUP) {
    replace_input = {NewValueNode(pyop_instance), node->input(1), node->input(2)};
  }
  if (!params.empty()) {
    Param param_first = *(params.begin());
    int32_t first_position = param_first.second;
    if (first_position == 1) {
      replace_input.pop_back();
    }
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      if (val == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: val is nullptr";
      }
      int32_t position = param.second;
      (void)replace_input.insert(replace_input.begin() + position, val);
    }
  }
  return replace_input;
}

void ReplaceOneOp(const Operator &replace_op, const CNodePtr &node) {
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  std::string instance_name = CreateInstanceName(node, 0);
  std::vector<AnfNodePtr> replace_input;
  replace_input = ReplaceOpInput(replace_op, instance_name, node);
  CNodePtr replace_node = func_graph->NewCNode(replace_input);
  MS_EXCEPTION_IF_NULL(replace_node);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  replace_node->set_scope(scope);
  replace_node->set_in_forward_flag(true);
  replace_input[0]->set_scope(scope);
  (void)manager->Replace(node, replace_node);
}

void StepReplaceOp(OperatorVector replace_op, const CNodePtr &node) {
  // step 1: get the graph manager and distribute_operator
  OperatorInfoPtr distribute_operator = node->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since distribute_operator is nullptr";
  }
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  // step 2: traverse op_list and insert nodes
  std::reverse(replace_op.begin(), replace_op.end());
  auto replace_op_info = distribute_operator->replace_op_info();
  std::reverse(replace_op_info.begin(), replace_op_info.end());
  if (!replace_op_info.empty() && replace_op_info.size() != replace_op.size()) {
    MS_LOG(EXCEPTION) << "replace_op_info is not empty and its size is not equal to replace_op!";
  }
  bool replace_op_info_flag = !replace_op_info.empty();
  for (size_t index = 0; index < replace_op.size(); ++index) {
    std::string instance_name = CreateInstanceName(node, index);
    std::vector<AnfNodePtr> replace_input;
    if (index != replace_op.size() - 1) {
      replace_input = CreateInput(replace_op[index], node, instance_name);
    } else {
      replace_input = ReplaceOpInput(replace_op[index], instance_name, node);
    }
    CNodePtr replace_node = func_graph->NewCNode(replace_input);
    MS_EXCEPTION_IF_NULL(replace_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    replace_node->set_scope(scope);
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(replace_node->input(0));
    if (prim->name() == EMBEDDING_LOOKUP) {
      auto attrs = prim->attrs();
      attrs[TARGET] = MakeValue(CPU);
      (void)prim->SetAttrs(attrs);
    }
    if (index == replace_op.size() - 1) {
      replace_node->set_user_data<OperatorInfo>(node->user_data<OperatorInfo>());
    }
    replace_node->set_in_forward_flag(true);
    replace_input[0]->set_scope(scope);
    if (replace_op_info_flag && replace_op_info[index].first) {
      auto new_cnode = InsertMakeTuple(replace_node, replace_op_info[index].second, func_graph);
      (void)manager->Replace(node, new_cnode);  // using Replace function to insert node
    } else {
      (void)manager->Replace(node, replace_node);  // using Replace function to insert node
    }
  }
  MS_LOG(INFO) << "Insert ReplaceOp success for " << distribute_operator->name();
}

bool IsSomePrimitive(const CNodePtr &cnode, const std::string &name) {
  ValueNodePtr anf_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(anf_node);
  PrimitivePtr prim = anf_node->value()->cast<PrimitivePtr>();
  return (prim->name() == name);
}

void StepReplaceGraph(const ReplaceGraphPtr &replace_graph, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(replace_graph);
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(replace_graph->second);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  for (auto &replace_input : replace_graph->first) {
    auto pre_node = node->input(IntToSize(replace_input.second));
    manager->SetEdge(replace_input.first, 1, pre_node);
  }
  // "(void)manager->Replace(replace_graph->first, pre_node);" can not be called
  auto replace_output = replace_graph->second;
  MS_EXCEPTION_IF_NULL(replace_output);
  (void)manager->Replace(node, replace_output);
}

int32_t GetTupleGetItemIndex(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  if (cnode->inputs().size() != 3) {
    MS_LOG(EXCEPTION) << cnode->ToString() << " size( " << cnode->inputs().size() << " ) is not 3";
  }
  if (!cnode->input(2)->isa<ValueNode>()) {
    MS_LOG(EXCEPTION) << "The index of tuple getitem is not a value node";
  }
  ValuePtr tuple_index_value = GetValueNode(cnode->input(2));
  MS_EXCEPTION_IF_NULL(tuple_index_value);
  if (!tuple_index_value->isa<Int32Imm>()) {
    MS_LOG(EXCEPTION) << "The index of tuple getitem is not int32";
  }
  return tuple_index_value->cast<Int32ImmPtr>()->value();
}
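
// Illustrative sketch (editorial assumption, not part of the upstream source): a
// tuple_getitem CNode has exactly three inputs, e.g.
//   %y = tuple_getitem(%t, 2)   // inputs: {ValueNode(kPrimTupleGetItem), %t, ValueNode(2)}
// so GetTupleGetItemIndex(%y) returns 2.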

void InsertVirtualDivOp(const VirtualDivOp &virtual_div_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  for (size_t index = 1; index < node_size; ++index) {
    AnfNodePtr input = node->input(index);
    MS_EXCEPTION_IF_NULL(input);
    if (!input->isa<CNode>() && !input->isa<Parameter>()) {  // if it is not a tensor, continue
      MS_LOG(INFO) << "insert div op: the index " << index << " is not a tensor, skip";
      continue;
    }
    for (size_t pos = 0; pos < virtual_div_op.size(); ++pos) {
      std::string instance_name = CreateInstanceName(node, pos);
      InsertNode(virtual_div_op[pos], node, index, node->input(index), func_graph, instance_name);
    }
    MS_LOG(INFO) << "insert div op for input index " << index << " of node";
  }
}

std::pair<AnfNodePtr, bool> FindParameter(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  if (!node->isa<Parameter>() && !node->isa<CNode>() && !node->isa<ValueNode>()) {
    return std::make_pair(nullptr, false);
  } else if (node->isa<Parameter>()) {
    return std::make_pair(node, false);
  } else if (node->isa<ValueNode>()) {
    if (IsValueNode<RefKey>(node)) {
      std::vector<AnfNodePtr> param_v = FindParameterByRefKeyNode(node, func_graph);
      if (param_v.size() != 1) {
        MS_LOG(EXCEPTION) << "FindParameterByRefKeyNode failed, return vector size must be 1, real is "
                          << param_v.size();
      }
      return std::make_pair(node, true);
    }
    return std::make_pair(nullptr, false);
  } else {
    CNodePtr cnode = node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      for (size_t index = 0; index < cnode->inputs().size(); ++index) {
        if (!FindParameter(cnode->input(index), func_graph).first) {
          continue;
        }
        return FindParameter(cnode->input(index), func_graph);
      }
    } else {
      if (IsParallelCareNode(cnode)) {
        return std::make_pair(nullptr, false);
      } else {
        ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
        MS_EXCEPTION_IF_NULL(prim_anf_node);
        for (size_t index = 0; index < cnode->inputs().size(); ++index) {
          PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
          MS_EXCEPTION_IF_NULL(prim);
          if (prim->name() == DEPEND && index != 1) {
            continue;
          }
          if (!FindParameter(cnode->input(index), func_graph).first) {
            continue;
          }
          return FindParameter(cnode->input(index), func_graph);
        }
      }
    }
  }
  return std::make_pair(nullptr, false);
}

std::pair<bool, CNodePtr> FindCNode(const AnfNodePtr &anode, const std::string &name, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(anode);
  MS_EXCEPTION_IF_NULL(anode->func_graph());
  FuncGraphManagerPtr manager = anode->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[anode];
  bool result = false;
  CNodePtr cnode_return = nullptr;
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if (node_prim->name() == name && node_pair.second == 1) {
      if (use_apply->func_graph() == func_graph) {
        result = true;
        cnode_return = use_apply;
        MS_LOG(INFO) << "Find Primitive " << name << " in the same func_graph";
        continue;
      }
      MS_LOG(INFO) << "Find Primitive " << name << " in different func_graph";
    }
  }
  return std::make_pair(result, cnode_return);
}

bool IsCastBeforMirror(const CNodePtr &node, size_t index) {
  // return true only if gradient_fp32_sync is true, the pre node is a Cast, and its type is not float32
  if (!ParallelContext::GetInstance()->gradient_fp32_sync()) {
    return false;
  }
  auto pre_node = node->input(index);
  MS_EXCEPTION_IF_NULL(pre_node);
  auto cnode = pre_node->cast<CNodePtr>();
  if (cnode == nullptr || !IsValueNode<Primitive>(cnode->input(0))) {
    return false;
  }
  auto pre_value_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(pre_value_node);
  auto pre_prim = pre_value_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(pre_prim);
  if (pre_prim->name() != CAST) {
    return false;
  }
  auto node_type = pre_node->Type();
  MS_EXCEPTION_IF_NULL(node_type);
  if (!node_type->isa<mindspore::TensorType>()) {
    MS_LOG(EXCEPTION) << "Unknown type.";
  }
  auto input_element_type = node_type->cast<mindspore::TensorTypePtr>()->element();
  MS_EXCEPTION_IF_NULL(input_element_type);
  auto type_id = input_element_type->type_id();
  return (type_id != kNumberTypeFloat32);
}

void InsertMirrorOps(const MirrorOps &mirror_ops, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if ((node->inputs().size() == 2) &&
      (AnfNodeIsPrimitive(node->input(1), MAKE_TUPLE) || AnfNodeIsPrimitive(node->input(1), MAKE_LIST))) {
    MS_LOG(INFO) << "The mirror for " << GetPrimName(node) << " has been handled by the make_tuple node";
    return;
  }
  if (mirror_ops.size() != node_size - 1) {
    MS_LOG(EXCEPTION) << "Mirrorops's size is wrong! mirror_ops size is " << mirror_ops.size() << ", node_size is "
                      << node_size - 1;
  }
  for (size_t index = 1; index < node_size; ++index) {
    OperatorVector backward_op = mirror_ops[index - 1];
    if (backward_op.empty()) {
      continue;
    }
    std::pair<AnfNodePtr, bool> param_node_pair = FindParameter(node->input(index), func_graph);
    if (!param_node_pair.first) {
      continue;
    }
    // not a RefKey
    if (!param_node_pair.second) {
      auto next_cnode = FindCNode(param_node_pair.first, MIRROR_OPERATOR, func_graph);
      // if there is already a MirrorOp in the same graph, use the MirrorOp CNode as an input instead
      if (next_cnode.first) {
        MS_EXCEPTION_IF_NULL(next_cnode.second);
        // param->cast->op, insert mirror before cast
        if (node->input(index)->isa<CNode>()) {
          auto pre_cnode = node->input(index)->cast<CNodePtr>();
          auto pre_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
          if (pre_prim->name() == CAST) {
            manager->SetEdge(pre_cnode, 1, next_cnode.second);
            continue;
          }
        }
        manager->SetEdge(node, SizeToInt(index), next_cnode.second);
        continue;
      }
    }
    // if the parameter found is a RefKey, or no MirrorOp is found in the same graph, insert a new MirrorOp
    // only one MirrorOp in backward_op
    if (backward_op.size() != 1) {
      MS_LOG(EXCEPTION) << "backward_op size must be 1, real is " << backward_op.size();
    }
    std::string instance_name = MIRROR_OP;
    if (IsCastBeforMirror(node, index)) {
      for (auto &op : backward_op) {
        // insert new node before the node
        CNodePtr cnode = node->input(index)->cast<CNodePtr>();
        MS_EXCEPTION_IF_NULL(cnode);
        AnfNodePtr pre_node = cnode->input(1);
        InsertNode(op, cnode, size_t(1), pre_node, func_graph, instance_name);
      }
    } else {
      for (auto &op : backward_op) {
        AnfNodePtr pre_node = node->input(index);
        InsertNode(op, node, index, pre_node, func_graph, instance_name);
      }
    }
  }
}

void BackwardCommunication(const OperatorInfoPtr &distribute_operator, const CNodePtr &node,
                           const std::vector<std::pair<CNodePtr, LossNodeInfo>> &sens_loss_pairs) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(node);
  bool is_loss_cnode =
    std::any_of(sens_loss_pairs.begin(), sens_loss_pairs.end(),
                [node](const std::pair<CNodePtr, LossNodeInfo> &element) { return element.second.loss_node == node; });
  MirrorOps mirror_ops = distribute_operator->mirror_ops();
  VirtualDivOp virtual_div_op = distribute_operator->virtual_div_op();
  // insert mirror op
  if (!mirror_ops.empty() && !distribute_operator->opt_shard_flag()) {
    MS_LOG(INFO) << "insert mirror op for " << distribute_operator->name();
    InsertMirrorOps(mirror_ops, node);
  }
  // insert virtual div op
  if (!virtual_div_op.empty() && is_loss_cnode) {
    MS_LOG(INFO) << "insert virtual div op for " << distribute_operator->name();
    InsertVirtualDivOp(virtual_div_op, node);
  }
}

std::string GetDisOpName(const std::string &prim_name) {
  std::string op_name = prim_name;
  if (!prim_name.empty() && (prim_name[0] == '_')) {
    op_name = prim_name.substr(1);
  }
  return op_name + "Info";
}
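
// Illustrative sketch (editorial assumption, not part of the upstream source): GetDisOpName
// maps a primitive name to its parallel OperatorInfo class name, e.g.
//   "MatMul"      -> "MatMulInfo"
//   "_VirtualDiv" -> "VirtualDivInfo"  // a leading underscore is stripped first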
  878. OperatorInfoPtr OperatorInstanceByName(const std::string &name, const PrimitiveAttrs &attrs,
  879. const std::vector<Shapes> &shape_list) {
  880. if (shape_list.size() != 2) {
  881. MS_LOG(ERROR) << "The size of shape list is not 2";
  882. return nullptr;
  883. }
  884. if (name.length() == 0) {
  885. MS_LOG(EXCEPTION) << "Length of name is zero!";
  886. }
  887. std::string distribute_opname = GetDisOpName(name);
  888. if (name == GATHERV2) {
  889. distribute_opname = name + "PInfo";
  890. auto data_parallel_iter = attrs.find(DATA_PARALLEL);
  891. if (data_parallel_iter != attrs.end()) {
  892. MS_EXCEPTION_IF_NULL(data_parallel_iter->second);
  893. if (!data_parallel_iter->second->isa<BoolImm>()) {
  894. MS_LOG(EXCEPTION) << ": data_parallel flag's type is not a bool.";
  895. }
  896. bool data_parallel = data_parallel_iter->second->cast<BoolImmPtr>()->value();
  897. if (data_parallel) {
  898. distribute_opname = name + "Info";
  899. }
  900. }
  901. }
  902. OperatorInfoPtr operator_ =
  903. (OperatorInfoPtr)DynCreator::Instance().Creat(distribute_opname, shape_list[0], shape_list[1], attrs, TOTAL_OPS);
  904. if (operator_ == nullptr) {
  905. MS_LOG(INFO) << "Creat " << name << " failed";
  906. return nullptr;
  907. }
  908. std::string origin_name = operator_->name();
  909. operator_->set_name(origin_name + std::to_string(TOTAL_OPS));
  910. MS_LOG(INFO) << "Successfully created operator " << origin_name;
  911. ++TOTAL_OPS;
  912. return operator_;
  913. }
  914. OperatorInfoPtr OperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
  915. const std::vector<Shapes> &shape_list) {
  916. MS_EXCEPTION_IF_NULL(prim);
  917. OperatorInfoPtr operator_ = OperatorInstanceByName(prim->name(), attrs, shape_list);
  918. if (operator_ == nullptr) {
  919. if (IsInBatchParallelBlackList(prim)) {
  920. MS_LOG(EXCEPTION) << "Operator " << prim->name() << " is not supported yet in auto parallel mode.";
  921. }
  922. MS_LOG(INFO) << "Creat " << prim->name() << " failed, use batch parallel";
  923. operator_ = OperatorInstanceByName(BATCH_PARALLEL, attrs, shape_list);
  924. MS_EXCEPTION_IF_NULL(operator_);
  925. }
  926. return operator_;
  927. }
  928. OperatorInfoPtr NewOperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
  929. std::vector<Shapes> shape_list) {
  930. OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
  931. for (size_t i = 0; i < shape_list[0].size(); ++i) {
  932. MS_LOG(INFO) << "No: " << i << " input's shape: " << ShapeToString(shape_list[0][i]);
  933. }
  934. return operator_;
  935. }
StrategyPtr ExtractStrategy(std::unordered_map<std::string, ValuePtr> attrs) {
  ValueTuplePtr var = attrs[STRATEGY]->cast<ValueTuplePtr>();
  StrategyPtr strategyPtr;
  std::vector<int32_t> stages = ParallelContext::GetInstance()->stage();
  auto res = attrs.find(STAGE_ATTR);
  int32_t stage_id = 0;
  if (res != attrs.end()) {
    stage_id = GetValue<int>(res->second);
  }
  if (stage_id && stages.empty()) {
    MS_LOG(ERROR) << "Found stage id: " << stage_id << " but the pipeline_stages is 0.";
    return nullptr;
  }
  MS_LOG(INFO) << "Extract information: strategy " << attrs[STRATEGY]->ToString();
  if (var == nullptr) {
    MS_LOG(EXCEPTION) << "Strategy value is nullptr";
  }
  if (var->size() > 0) {
    std::vector<ValuePtr> elements = var->value();
    Strategys strategy;
    for (uint32_t index = 0; index < elements.size(); ++index) {
      Dimensions dim;
      if (elements[index]->isa<ValueSequeue>()) {
        ValueTuplePtr value_tuple = elements[index]->cast<ValueTuplePtr>();
        std::vector<ValuePtr> value_vector = value_tuple->value();
        (void)std::transform(
          value_vector.begin(), value_vector.end(), std::back_inserter(dim), [](const ValuePtr &value) {
            return value->isa<Int64Imm>() ? GetValue<int64_t>(value) : static_cast<int64_t>(GetValue<int>(value));
          });
        strategy.push_back(dim);
      } else {
        MS_LOG(EXCEPTION) << "Failure: strategy's format is wrong! Need ValueSequence";
      }
    }
    if (strategy.empty()) {
      MS_LOG(EXCEPTION) << "ExtractStrategy: failed to extract strategy";
    }
    strategyPtr = NewStrategy(stage_id, strategy);
  }
  return strategyPtr;
}
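
// Return the shape(s) of a node as Shapes (a vector of Shape). A single-output
// node yields one entry, e.g. a [32, 64] tensor (assumed shape, for illustration)
// yields Shapes{{32, 64}}; a tuple-shaped node yields one entry per element.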
Shapes GetNodeShape(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  Shapes shapes;
  BaseShapePtr base_shape_ptr = node->Shape();
  if (node->isa<CNode>()) {
    auto cnode = node->cast<CNodePtr>();
    if (IsValueNode<Primitive>(cnode->input(0))) {
      PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
      MS_EXCEPTION_IF_NULL(prim);
      if (prim->name() == MAKEREF) {
        AnfNodePtr ref_node = cnode->input(1);
        auto func_graph = cnode->func_graph();
        MS_EXCEPTION_IF_NULL(ref_node);
        MS_EXCEPTION_IF_NULL(func_graph);
        return GetRefKeyNodeShape(ref_node, func_graph);
      }
    }
    if (cnode->input(0)->isa<CNode>()) {
      if (cnode->inputs().size() < 2) {
        MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " size is smaller than 2";
      }
      base_shape_ptr = cnode->input(1)->Shape();
    }
  }
  if (base_shape_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " shape_ptr is nullptr, full name is "
                      << node->fullname_with_scope();
  }
  auto tuple_shape_ptr = dyn_cast<abstract::SequeueShape>(base_shape_ptr);
  if (tuple_shape_ptr != nullptr) {
    auto tuple_shape = tuple_shape_ptr->shape();
    for (auto &shape : tuple_shape) {
      auto each_shape = dyn_cast<abstract::Shape>(shape);
      MS_EXCEPTION_IF_NULL(each_shape);
      std::vector<int> shape_int = each_shape->shape();
      Shape new_shape;
      (void)std::transform(shape_int.begin(), shape_int.end(), std::back_inserter(new_shape),
                           [](const int &value) { return static_cast<int64_t>(value); });
      shapes.push_back(new_shape);
    }
  } else {
    auto shape_ptr = dyn_cast<abstract::Shape>(base_shape_ptr);
    MS_EXCEPTION_IF_NULL(shape_ptr);
    std::vector<int> shape_int = shape_ptr->shape();
    Shape new_shape;
    (void)std::transform(shape_int.begin(), shape_int.end(), std::back_inserter(new_shape),
                         [](const int &value) { return static_cast<int64_t>(value); });
    shapes.push_back(new_shape);
  }
  return shapes;
}

std::vector<AnfNodePtr> FindParameterByRefKeyNode(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> parameters;
  if (!IsValueNode<RefKey>(node)) {
    MS_LOG(ERROR) << "The node is not a ref key";
    return parameters;
  }
  auto ref_key = GetValueNode<RefKeyPtr>(node);
  MS_EXCEPTION_IF_NULL(ref_key);
  auto name = ref_key->tag();
  auto manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto roots = manager->roots();
  if (roots.size() != 1) {
    MS_LOG(ERROR) << "The size of roots ( " << roots.size() << " ) is not 1";
    return parameters;
  }
  FuncGraphPtr root_g = roots.back();
  MS_EXCEPTION_IF_NULL(root_g);
  for (auto &param_node : root_g->parameters()) {
    auto param = param_node->cast<ParameterPtr>();
    if (param && (name == param->name())) {
      parameters.push_back(param_node);
      MS_LOG(INFO) << "The name of the ref key is: " << name;
      return parameters;
    }
  }
  MS_LOG(ERROR) << "The name of the ref key is: " << name << ", but the parameter was not found";
  return parameters;
}

Shapes GetRefKeyNodeShape(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(node, func_graph);
  if (parameters.size() != 1) {
    MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
  }
  Shapes input_shapes;
  input_shapes = GetNodeShape(parameters[0]);
  if (input_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Get input shape failed";
  }
  MS_LOG(INFO) << "The parameter shape is " << ShapeToString(input_shapes[0]);
  return input_shapes;
}
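
// Collect {input shapes, output shapes} for a cnode. As a sketch with assumed
// shapes: MatMul with inputs [32, 64] and [64, 16] and output [32, 16] yields
// shape_all = {{{32, 64}, {64, 16}}, {{32, 16}}}.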
std::vector<Shapes> ExtractShape(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  Shapes shape_inputs, shape_outputs;
  std::vector<Shapes> shape_all;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  std::vector<AnfNodePtr> node_inputs{all_inputs.begin() + 1, all_inputs.end()};
  size_t inputs_size = all_inputs.size();
  for (size_t i = 1; i < inputs_size; ++i) {
    Shapes input_shapes;
    AnfNodePtr input = all_inputs[i];
    if (IsValueNode<RefKey>(input)) {
      auto func_graph = node->func_graph();
      MS_EXCEPTION_IF_NULL(func_graph);
      std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(input, func_graph);
      if (parameters.size() != 1) {
        MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
      }
      std::pair<AnfNodePtr, int> node_pair = std::make_pair(node, SizeToInt(i));
      g_RefMap[parameters[0]] = node_pair;
      input_shapes = GetRefKeyNodeShape(input, func_graph);
    } else if (IsValueNode<Tensor>(input) || input->isa<CNode>() || input->isa<Parameter>()) {
      input_shapes = GetNodeShape(input);
    } else {
      continue;
    }
    if (input_shapes.size() != 1) {
      if (inputs_size == 2) {  // like concat
        shape_inputs = input_shapes;
        break;
      } else {
        MS_LOG(EXCEPTION) << "ExtractShape: Get input shape failed";
      }
    }
    shape_inputs.push_back(input_shapes[0]);
  }
  shape_all.push_back(shape_inputs);
  // extract out shape
  shape_outputs = GetNodeShape(node);
  shape_all.push_back(shape_outputs);
  return shape_all;
}

std::pair<AnfNodePtr, int> FindParallelCareNode(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_node_anf = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_node_anf);
    PrimitivePtr node_prim = prim_node_anf->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if (node_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
      return node_pair;
    }
    // search recursively, calling the recursion only once per user
    auto next_pair = FindParallelCareNode(node_pair.first);
    if (next_pair.first != nullptr) {
      return next_pair;
    }
  }
  return std::make_pair(nullptr, 0);
}
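
// Find the first parallel-care user of `parameter`, descending into the
// sub-graphs the parameter is passed to; returns {nullptr, 0} if the parameter
// never reaches a parallel-care node. The int in the pair is the input index
// of the parameter at the found cnode.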
std::pair<AnfNodePtr, int> FindSubGraph(const FuncGraphPtr &graph, const AnfNodePtr &parameter) {
  MS_EXCEPTION_IF_NULL(graph);
  MS_EXCEPTION_IF_NULL(parameter);
  FuncGraphManagerPtr manager = graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  std::pair<AnfNodePtr, int> prim_anf_node_pair = FindParallelCareNode(parameter);
  if (prim_anf_node_pair.first != nullptr) {
    return prim_anf_node_pair;
  } else {
    AnfNodeIndexSet param_sub_set = manager->node_users()[parameter];
    for (auto &param_pair : param_sub_set) {
      CNodePtr param_cnode = param_pair.first->cast<CNodePtr>();
      AnfNodePtr graph_value_node;
      if (param_cnode->input(0)->isa<CNode>()) {
        graph_value_node = param_cnode->input(0)->cast<CNodePtr>()->input(1);
      } else {
        graph_value_node = param_cnode->input(0);
      }
      if (!IsValueNode<FuncGraph>(graph_value_node)) {
        continue;
      }
      FuncGraphPtr graph_sub = GetValueNode<FuncGraphPtr>(graph_value_node);
      auto parameters = graph_sub->parameters();
      if (IntToSize(param_pair.second - 1) >= parameters.size()) {
        MS_LOG(EXCEPTION) << "The index is out of range, index is " << param_pair.second - 1 << ", vector size is "
                          << parameters.size();
      }
      std::pair<AnfNodePtr, int> res = FindSubGraph(graph_sub, parameters[IntToSize(param_pair.second - 1)]);
      if (res.first != nullptr) {
        return res;
      }
    }
  }
  return std::make_pair(nullptr, 0);
}

void ApplyParallelOptOnParam(TensorLayout *tensor_layout, const OperatorInfoPtr &distribute_operator,
                             const CNodePtr &cnode, const AnfNodePtr &parameter, size_t index) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  MS_EXCEPTION_IF_NULL(parameter);
  std::vector<Group> dev_group;
  // create the communication group for the allgather operator
  if (distribute_operator->CreateGroupByTensorMap(tensor_layout->origin_tensor_map().array(), &dev_group) ==
        Status::SUCCESS &&
      !dev_group.empty()) {
    // set the optimizer shard split flag to avoid inserting mirror_ops
    distribute_operator->set_opt_shard_flag(true);
    // insert an allgather operator between the sharded parameter and the cnode
    Operator op = CreateAllGatherOp(dev_group[0].name());
    auto graph = cnode->func_graph();
    MS_EXCEPTION_IF_NULL(graph);
    InsertNode(op, cnode, index, parameter, graph, PARALLEL_OPTIMIZER_ALLGATHER);
    // set the communication group in the tensor layout for checkpoint saving
    tensor_layout->set_opt_shard_group(dev_group[0].name());
    // add fusion flag
    auto allgather = cnode->input(index)->cast<CNodePtr>();
    auto prim = GetValueNode<PrimitivePtr>(allgather->input(0));
    auto attrs = prim->attrs();
    // enable the fusion flag later when it's supported in backend
    attrs["fusion"] = MakeValue(0);
    prim->SetAttrs(attrs);
    MS_LOG(INFO) << "Parallel optimizer is applied on " << parameter->ToString();
  } else {
    MS_LOG(ERROR) << "Applying the parallel optimizer on " << parameter->ToString() << " failed!";
  }
}
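
// Replace a parameter's abstract shape with its slice shape. As a sketch with
// assumed numbers: a [64, 64] weight under strategy (4, 1) becomes [16, 64],
// and may shrink further if the parallel optimizer shards it across the
// devices on which it would otherwise be replicated.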
void SetParallelShape(const AnfNodePtr &parameter, const std::pair<AnfNodePtr, int> &res) {
  MS_EXCEPTION_IF_NULL(parameter);
  AbstractBasePtr abstract = parameter->abstract();
  MS_EXCEPTION_IF_NULL(abstract);
  MS_LOG(DEBUG) << "SetParallelShape " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
  CNodePtr cnode = res.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = cnode->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: node " << cnode->ToString() << "'s OperatorInfoPtr is nullptr";
  }
  if (IntToSize(res.second - 1) >= distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << res.second - 1 << ", vector size is "
                      << distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[IntToSize(res.second - 1)];
  TensorLayout tensor_layout = tensorinfo_in.tensor_layout();
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  bool enable_parallel_optimizer = ParallelContext::GetInstance()->enable_parallel_optimizer();
  Shape slice_shape = tensor_layout.slice_shape().array();
  if (enable_parallel_optimizer) {
    if (!ParameterRequireGrad(parameter)) {
      // only trainable parameters need the parallel optimizer
      MS_LOG(INFO) << "The parallel optimizer is not needed for " << parameter->ToString();
    } else if (tensor_layout.GenerateOptShardSliceShape() == Status::SUCCESS) {
      // get a totally sharded tensor slice shape if the weight is repeated on devices
      // and the shape of the first dimension can be divided;
      // apply the parallel optimizer on parameters
      ApplyParallelOptOnParam(&tensor_layout, distribute_operator, cnode, parameter, IntToSize(res.second));
      slice_shape = tensor_layout.opt_shard_slice_shape();
    }
  }
  MS_LOG(INFO) << "SetParallelShape slice_shape " << parameter->ToString() << " shape "
               << MakeValue(slice_shape)->ToString() << ", op name is " << distribute_operator->name();
  std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
  MS_EXCEPTION_IF_NULL(parallel_shape);
  // Don't modify it in-place, as the pointer of this AbstractValue may be used as a cache key in StaticAnalysis.
  auto cloned_abstract = abstract->Clone();
  MS_EXCEPTION_IF_NULL(cloned_abstract);
  cloned_abstract->set_shape(parallel_shape);
  parameter->set_abstract(cloned_abstract);
  ParameterPtr parameter_ptr = parameter->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(parameter_ptr);
  parameter_ptr->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(tensor_layout));
}

void CoverSliceShape(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto parameters = root->parameters();
  for (auto &parameter : parameters) {
    MS_EXCEPTION_IF_NULL(parameter->Shape());
    auto iter = g_RefMap.find(parameter);
    if (iter != g_RefMap.end()) {
      SetParallelShape(parameter, g_RefMap[parameter]);
      continue;
    }
    std::pair<AnfNodePtr, int> res = FindSubGraph(root, parameter);
    if (res.first == nullptr) {
      MS_LOG(INFO) << "Parameter " << parameter->ToString() << " doesn't need to set a parallel shape";
    } else {
      SetParallelShape(parameter, res);
      MS_LOG(DEBUG) << "Parameter " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
    }
  }
  g_RefMap.clear();
}

bool ParameterIsCloned(const AnfNodePtr &parameter_node) {
  MS_EXCEPTION_IF_NULL(parameter_node);
  auto cloned_parameter = parameter_node->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(cloned_parameter);
  // find the cloned parameter
  if (!cloned_parameter->has_default()) {
    return false;
  }
  auto param_value = cloned_parameter->param_info();
  if (param_value == nullptr) {
    return false;
  }
  bool cloned = param_value->cloned();
  if (!cloned) {
    return false;
  }
  MS_LOG(INFO) << "The parameter: " << cloned_parameter->name() << " is cloned";
  return true;
}
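
// Propagate the tensor layout and sliced shape from each source parameter to
// its clones (for example the accumulation parameters an optimizer creates),
// matching clones to sources via cloned_index / be_cloned_index in ParamInfo.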
void SetClonedTensorShapeForOptimizer(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &cloned_parameter_node : root->parameters()) {
    MS_EXCEPTION_IF_NULL(cloned_parameter_node);
    auto cloned_parameter = cloned_parameter_node->cast<ParameterPtr>();
    MS_EXCEPTION_IF_NULL(cloned_parameter);
    if (!ParameterIsCloned(cloned_parameter_node)) {
      continue;
    }
    auto param_value = cloned_parameter->param_info();
    if (param_value == nullptr) {
      continue;
    }
    // get the cloned index
    int32_t cloned_index = param_value->cloned_index();
    // find the parameter that this one was cloned from
    bool found_be_cloned_parameter = false;
    ParameterPtr cloned_from_parameter = nullptr;
    AnfNodePtr cloned_from_node = nullptr;
    for (auto &be_cloned_parameter_node : root->parameters()) {
      MS_EXCEPTION_IF_NULL(be_cloned_parameter_node);
      auto be_cloned_parameter = be_cloned_parameter_node->cast<ParameterPtr>();
      MS_EXCEPTION_IF_NULL(be_cloned_parameter);
      if (!be_cloned_parameter->has_default()) {
        continue;
      }
      auto param_value_in = be_cloned_parameter->param_info();
      if (param_value_in == nullptr) {
        continue;
      }
      if (!param_value_in->be_cloned()) {
        continue;
      }
      // get the indices of the parameters cloned from this candidate
      auto &be_cloned_index = param_value_in->be_cloned_index();
      if (std::find(be_cloned_index.begin(), be_cloned_index.end(), cloned_index) != be_cloned_index.end()) {
        found_be_cloned_parameter = true;
        cloned_from_parameter = be_cloned_parameter;
        cloned_from_node = be_cloned_parameter_node;
      }
    }
    if (found_be_cloned_parameter) {
      // set the shape and tensor layout for the cloned parameter
      cloned_parameter->set_user_data<TensorLayout>(cloned_from_parameter->user_data<TensorLayout>());
      MS_EXCEPTION_IF_NULL(cloned_parameter_node->abstract());
      MS_EXCEPTION_IF_NULL(cloned_from_node->abstract());
      auto cloned_abstract = cloned_parameter_node->abstract()->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(cloned_from_node->abstract()->GetShapeTrack());
      cloned_parameter_node->set_abstract(cloned_abstract);
      MS_LOG(INFO) << "The parameter: " << cloned_parameter->name()
                   << " is cloned from: " << cloned_from_parameter->name() << ", clone index is: " << cloned_index;
    } else {
      MS_LOG(EXCEPTION) << "The parameter: " << cloned_parameter->name() << " is cloned, cloned index is "
                        << cloned_index << ", but the source parameter was not found";
    }
  }
}
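
// Attach a data-parallel strategy to VirtualDataset: split only the batch
// dimension by the device number. E.g. with 8 devices and a rank-3 input
// (assumed numbers), the generated strategy entry is (8, 1, 1).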
void SetVirtualDatasetStrategy(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  bool full_batch = ParallelContext::GetInstance()->full_batch();
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(node->input(0));
  MS_EXCEPTION_IF_NULL(prim);
  if (prim->name() == VIRTUAL_DATA_SET) {
    CheckGlobalDeviceManager();
    int32_t dev_num;
    if (full_batch) {
      dev_num = 1;
    } else {
      dev_num = SizeToInt(g_device_manager->GetDeviceListByStageId(0).size());
    }
    auto attrs_temp = prim->attrs();
    std::vector<Shapes> shape_list = ExtractShape(node);
    if (shape_list.empty()) {
      MS_LOG(EXCEPTION) << "Failure: node " << node->ToString() << " failed to extract shape";
    }
    std::vector<ValuePtr> elements;
    for (size_t i = 0; i < shape_list[0].size(); i++) {
      if (shape_list[0][i].empty()) {
        MS_LOG(EXCEPTION) << "shape_list[ " << i << " ].size() is zero";
      }
      Dimensions input_strategy = {dev_num};
      for (size_t j = 1; j < shape_list[0][i].size(); j++) {
        input_strategy.push_back(1);
      }
      elements.push_back(MakeValue(input_strategy));
    }
    ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
    attrs_temp[STRATEGY] = strategy;
    (void)prim->SetAttrs(attrs_temp);
  }
}

// This function checks whether the rank is valid for the given stage.
// If the rank is not valid for the given stage, we choose not to initialize the strategy of the operation.
// For example, if stage is [4, 4] and the group_list is [[0,1,2,3],[4,5,6,7]],
// then for stage 0 we require that the rank_id is in [0,1,2,3].
Status ValidRankCheck(int32_t global_rank, int32_t strategy_stage) {
  RankList local_group_list = g_device_manager->GetDeviceListByStageId(strategy_stage);
  int32_t target = global_rank;
  if (std::any_of(local_group_list.begin(), local_group_list.end(), [target](int32_t a) { return a == target; })) {
    return Status::SUCCESS;
  }
  return Status::FAILED;
}

Status ValidStageCheck(const std::vector<int32_t> &stages, int32_t strategy_stage) {
  if (stages.size() > 0) {
    if (strategy_stage >= 0 && strategy_stage < (int32_t)stages.size()) {
      return Status::SUCCESS;
    }
    return Status::FAILED;
  } else {
    return Status::SUCCESS;
  }
}
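
// For every parallel-care cnode, build its OperatorInfo: extract input/output
// shapes, pick a strategy (user attribute, checkpoint, or a generated
// batch-parallel fallback), validate the stage and rank, then Init the operator
// and attach it to the cnode as user data.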
void ExtractInformation(const std::vector<AnfNodePtr> &all_nodes) {
  // load the strategy map from the checkpoint
  StrategyMap stra_map;
  if (StrategyCheckpoint::GetInstance().LoadCheckPointOn()) {
    if (StrategyCheckpoint::GetInstance().Load(&stra_map) != SUCCESS) {
      MS_LOG(EXCEPTION) << "Load strategy checkpoint failed";
    }
  }
  // get the global rank and stages after loading the checkpoint
  int32_t global_rank = ParallelContext::GetInstance()->global_rank();
  std::vector<int32_t> stages = ParallelContext::GetInstance()->stage();
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    SetVirtualDatasetStrategy(cnode);
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    if (prim->name() == MAKE_TUPLE || prim->name() == MAKE_LIST) {
      continue;
    }
    auto attrs = prim->attrs();
    MS_LOG(INFO) << "Extract information: node " << node->ToString() << " prim " << prim->name();
    if (IsParallelCareNode(cnode)) {
      std::vector<Shapes> shape_list = ExtractShape(cnode);
      if (shape_list.empty()) {
        MS_LOG(EXCEPTION) << "Failure: node " << node->ToString() << " failed to extract shape";
      }
      OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
      if (operator_ == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: Primitive " << prim->name() << " OperatorInstance failed";
      }
      auto &inputs = cnode->inputs();
      std::vector<ValuePtr> input_value;
      for (size_t index = 1; index < inputs.size(); ++index) {
        if (inputs[index]->isa<ValueNode>()) {
          input_value.push_back(GetValueNode(inputs[index]));
        } else {
          input_value.emplace_back(nullptr);
        }
      }
      StrategyPtr strategyPtr = nullptr;
      (*operator_).set_input_value(input_value);
      (*operator_).set_outputs_dtype(cnode->Type());
      (*operator_).set_cnode(cnode);
      if (prim->name() == RESHAPE) {
        cnode->set_user_data<OperatorInfo>(operator_);
        continue;
      }
      // load the strategy checkpoint
      // key of the strategy map
      std::string strategy_key_name = "";
      auto param_names = NodeParameterName(cnode);
      if (!param_names.empty()) {
        strategy_key_name = prim->name() + "_" + param_names[0].first;
      }
      bool load_strategy_from_ckpt =
        StrategyCheckpoint::GetInstance().LoadCheckPointOn() && stra_map.find(strategy_key_name) != stra_map.end();
      if (!StrategyFound(attrs) && !load_strategy_from_ckpt) {
        MS_LOG(INFO) << "ExtractInformation: the strategy of node " << node->ToString() << " prim " << prim->name()
                     << " is empty, using batch parallel";
        std::shared_ptr<Strategys> strategy_v_ptr = operator_->GenerateBatchStrategies();
        if (strategy_v_ptr == nullptr) {
          MS_LOG(EXCEPTION) << "Failure: generating the batch parallel strategy failed";
        }
        std::vector<ValuePtr> elements;
        for (size_t i = 0; i < strategy_v_ptr->size(); i++) {
          elements.push_back(MakeValue((*strategy_v_ptr)[i]));
        }
        ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
        // display the strategy generated by batch parallel
        attrs[GEN_STRATEGY] = strategy;
        (void)prim->SetAttrs(attrs);
        MS_LOG(INFO) << "node " << node->ToString() << " prim " << prim->name() << " batch parallel strategy is "
                     << attrs[GEN_STRATEGY]->ToString();
        strategyPtr = NewStrategy(0, *strategy_v_ptr);
      } else if (load_strategy_from_ckpt) {
        strategyPtr = stra_map[strategy_key_name];
      } else {
        strategyPtr = ExtractStrategy(attrs);
      }
      if (strategyPtr != nullptr) {
        (*operator_).set_stage_id(strategyPtr->GetInputStage());
        MS_LOG(INFO) << "The extracted stage id for op " << prim->name() << " is " << (*operator_).stage_id();
        if (ValidStageCheck(stages, (*operator_).stage_id()) == FAILED) {
          MS_LOG(ERROR) << "The stage " << strategyPtr->GetInputStage() << " for operator " << prim->name()
                        << " exceeds the global stage size " << stages.size() << '.';
          return;
        }
        // If the strategy is not valid for the given global rank, then we skip the Init of the strategy
        if (ValidRankCheck(global_rank, (*operator_).stage_id()) == FAILED) {
          MS_LOG(INFO) << "The global rank " << global_rank << " is not in the stage's rank list, skip the strategy "
                       << "init for operator " << prim->name();
        } else if (operator_->Init(strategyPtr) == FAILED) {
          MS_LOG(EXCEPTION) << "Failure: operator " << prim->name() << " init failed";
        }
        cnode->set_user_data<OperatorInfo>(operator_);
      } else {
        MS_LOG(EXCEPTION) << "ERROR: strategyPtr is nullptr";
      }
    }
  }
}

TensorLayout GetInputLayoutFromCNode(const std::pair<AnfNodePtr, int> &node_pair) {
  CNodePtr cnode = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  MS_EXCEPTION_IF_NULL(distribute_operator);
  int index = node_pair.second;
  if (index > SizeToInt(distribute_operator->inputs_tensor_info().size())) {
    MS_LOG(EXCEPTION) << "The index is out of range, the index is " << index - 1 << ", the vector size is "
                      << distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensorlayout_in = tensorinfo_in.tensor_layout();
  return tensorlayout_in;
}

// if reshape's output connects to several primitives, return the first layout found
std::shared_ptr<TensorLayout> FindNextLayout(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  MS_EXCEPTION_IF_NULL(cnode->func_graph());
  FuncGraphManagerPtr manager = cnode->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[cnode];
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    MS_LOG(INFO) << "FindNextLayout prim " << node_prim->name();
    if (node_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_apply) && use_apply->has_user_data<OperatorInfo>()) {
      MS_LOG(INFO) << "FindNextLayout success prim " << node_prim->name();
      auto layout = GetInputLayoutFromCNode(node_pair);
      return std::make_shared<TensorLayout>(layout);
    }
    MS_LOG(DEBUG) << "FindNextLayout failed prim " << node_prim->name() << " " << IsParallelCareNode(use_apply)
                  << " " << use_apply->has_user_data<OperatorInfo>();
    auto layout_ptr = FindNextLayout(use_apply);
    if (layout_ptr) {
      return layout_ptr;
    }
  }
  MS_LOG(WARNING) << "FindNextLayout returned nullptr; if reshape is not the last primitive, there must be an error";
  return nullptr;
}

std::shared_ptr<TensorLayout> GetOutputLayoutFromCNode(const CNodePtr &cnode, size_t output_index) {
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  MS_EXCEPTION_IF_NULL(distribute_operator);
  if (distribute_operator->outputs_tensor_info().size() <= output_index) {
    MS_LOG(EXCEPTION) << "outputs_tensor_info size is " << distribute_operator->outputs_tensor_info().size()
                      << ", must be greater than output_index " << output_index;
  }
  TensorInfo tensorinfo_out = distribute_operator->outputs_tensor_info()[output_index];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  return std::make_shared<TensorLayout>(tensorlayout_out);
}

std::shared_ptr<TensorLayout> FindPrevParallelCareNodeLayout(const AnfNodePtr &node, size_t output_index) {
  if (!node->isa<CNode>()) {
    return nullptr;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return nullptr;
  }
  if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
    auto layout_ptr = GetOutputLayoutFromCNode(cnode, output_index);
    if (!layout_ptr) {
      MS_LOG(EXCEPTION) << "Failure: GetOutputLayoutFromCNode failed";
    }
    return layout_ptr;
  }
  return nullptr;
}

std::shared_ptr<TensorLayout> FindParameterNextLayout(const AnfNodePtr &node) {
  FuncGraphManagerPtr manager = node->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if ((node_prim->name() == DEPEND && node_pair.second != 1) || node_prim->name() == RESHAPE) {
      continue;
    }
    if (IsParallelCareNode(use_apply) && use_apply->has_user_data<OperatorInfo>()) {
      auto layout = GetInputLayoutFromCNode(node_pair);
      return std::make_shared<TensorLayout>(layout);
    }
  }
  return nullptr;
}

std::shared_ptr<TensorLayout> CreateParameterLayout(const AnfNodePtr &node) {
  // Create a DataParallel tensor layout for the parameter (supports WideDeep).
  auto next_layout = FindParameterNextLayout(node);
  if (next_layout != nullptr) {
    return next_layout;
  }
  CheckGlobalDeviceManager();
  int32_t dev_num = SizeToInt(g_device_manager->GetDeviceListByStageId(0).size());
  TensorLayout input_tensor_layout;
  // create input_shape
  Shapes inputs_shape = GetNodeShape(node);
  Shape input_shape_array = inputs_shape[0];
  if (input_shape_array.empty()) {
    MS_LOG(EXCEPTION) << "Reshaping a scalar parameter is not supported.";
  }
  // create tensor_map
  size_t shape_size = input_shape_array.size();
  TensorMap input_tensor_map_array(SizeToInt(shape_size) - 1, -1);
  input_tensor_map_array.insert(input_tensor_map_array.begin(), 0);
  // create dev_matrix
  Shape dev_matrix_array = {dev_num};
  if (input_tensor_layout.InitFromVector(dev_matrix_array, input_tensor_map_array, input_shape_array) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Creating the tensor layout for the parameter failed.";
  }
  return std::make_shared<TensorLayout>(input_tensor_layout);
}
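
// Infer the redistribution ops that move a sens tensor from a stand-alone
// layout (tensor map all -1, dev_matrix [dev_num]) to the loss layout.
// Returns nullptr when sens is a scalar and nothing needs to move.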
RedistributionOpListPtr InferSensRedistribution(const AnfNodePtr &node, const TensorLayout &loss_layout) {
  MS_EXCEPTION_IF_NULL(node);
  TensorRedistribution tensor_redistribution;
  // create the stand-alone layout: TensorMap: [all -1], dev_matrix: [dev_num].
  CheckGlobalDeviceManager();
  int32_t dev_num = SizeToInt(g_device_manager->GetDeviceListByStageId(0).size());
  TensorLayout stand_alone_layout;
  Shapes inputs_shape = GetNodeShape(node);
  if (inputs_shape.empty()) {
    MS_LOG(EXCEPTION) << "InferSensRedistribution failed because the input shape is empty.";
  }
  Shape input_shape_array = inputs_shape[0];
  if (input_shape_array.empty()) {
    MS_LOG(INFO) << "No need to redistribute sens.";
    return nullptr;
  }
  // TensorMap
  TensorMap stand_alone_tensor_map_array(SizeToInt(input_shape_array.size()), -1);
  // Dev_matrix
  Shape dev_matrix_array = {dev_num};
  if (stand_alone_layout.InitFromVector(dev_matrix_array, stand_alone_tensor_map_array, input_shape_array) == FAILED) {
    MS_LOG(EXCEPTION) << "Creating the tensor layout for sens failed.";
  }
  // infer the redistribution op list between the stand-alone layout and the loss layout.
  RankList dev_list = g_device_manager->GetDeviceListByStageId(0);
  if (tensor_redistribution.Init(stand_alone_layout, loss_layout, dev_list) == FAILED) {
    MS_LOG(EXCEPTION) << "Redistribution init for sens failed.";
  }
  RedistributionOpListPtr sens_redistribution_list = tensor_redistribution.InferTensorRedistributionOperatorList();
  MS_EXCEPTION_IF_NULL(sens_redistribution_list);
  return sens_redistribution_list;
}

std::shared_ptr<TensorLayout> FindPrevLayout(const AnfNodePtr &node) {
  if (node->isa<Parameter>()) {
    return CreateParameterLayout(node);
  }
  if (!node->isa<CNode>()) {
    return nullptr;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return nullptr;
  }
  if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
    auto layout_ptr = GetOutputLayoutFromCNode(cnode, 0);
    if (!layout_ptr) {
      MS_LOG(EXCEPTION) << "Failure: GetOutputLayoutFromCNode failed";
    }
    return layout_ptr;
  }
  ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  if (prim->name() == TUPLE_GETITEM) {
    auto tuple_index = GetTupleGetItemIndex(cnode);
    auto layout_ptr = FindPrevParallelCareNodeLayout(cnode->input(1), IntToSize(tuple_index));
    if (!layout_ptr) {
      MS_LOG(EXCEPTION)
        << "Failure: FindPrevLayout failed; there is a tuple_getitem before the reshape, but there does not exist a "
           "parallel care node before the tuple_getitem!";
    }
    return layout_ptr;
  }
  for (size_t index = 0; index < cnode->inputs().size(); ++index) {
    if (prim->name() == DEPEND && index != 1) {
      continue;
    }
    auto layout_ptr = FindPrevLayout(cnode->inputs()[index]);
    if (!layout_ptr) {
      continue;
    }
    return layout_ptr;
  }
  MS_LOG(WARNING) << "FindPrevLayout returned nullptr; if reshape is not the first primitive, there must be an error";
  return nullptr;
}

void ReshapeInit(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>()) {
      continue;
    }
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->user_data<OperatorInfo>();
    if (operator_info == nullptr) {
      MS_LOG(EXCEPTION) << "Failure: Primitive " << prim->ToString() << " OperatorInstance is nullptr";
    }
    if (prim->name() != RESHAPE) {
      continue;
    }
    auto attrs = prim->attrs();
    if (StrategyFound(attrs)) {
      MS_LOG(EXCEPTION) << "Setting a strategy for Reshape has no effect!";
    }
    MS_ASSERT(cnode->inputs().size() == 3);
    auto prev_layout_ptr = FindPrevLayout(cnode->input(1));
    if (prev_layout_ptr) {
      auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
      reshape_info_ptr->SetInputLayout(*prev_layout_ptr);
    }
    auto next_layout_ptr = FindNextLayout(cnode);
    if (next_layout_ptr) {
      auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
      reshape_info_ptr->SetOutputLayout(*next_layout_ptr);
    }
    if (operator_info->Init(nullptr) == FAILED) {
      MS_LOG(EXCEPTION) << "Failure: operator " << prim->ToString() << " init failed";
    }
  }
}

CNodePtr HandleDependLoss(const CNodePtr &cnode) {
  // handle the pattern return->depend->loss
  auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
  MS_EXCEPTION_IF_NULL(prim);
  if (prim->name() == DEPEND) {
    auto depend_before = cnode->input(1)->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(depend_before);
    return HandleDependLoss(depend_before);
  }
  return cnode;
}
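
// Walk back from the return node of a forward graph to locate its loss cnode,
// looking through depend/cast/tuple_getitem wrappers. For the (assumed) pattern
// return -> tuple_getitem(i) -> loss, dout_index is recorded as i.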
LossNodeInfo FindLossCNode(const FuncGraphPtr &func_graph) {
  LossNodeInfo loss_node_info;
  MS_EXCEPTION_IF_NULL(func_graph);
  CNodePtr return_node = func_graph->get_return();
  MS_EXCEPTION_IF_NULL(return_node);
  if (return_node->size() < 2) {
    MS_LOG(EXCEPTION) << "Failure: " << return_node->ToString() << " size is smaller than 2";
  }
  AnfNodePtr pre_node = return_node->input(1);
  MS_EXCEPTION_IF_NULL(pre_node);
  auto pre_cnode = pre_node->cast<CNodePtr>();
  if (pre_cnode == nullptr || !IsValueNode<Primitive>(pre_cnode->input(0))) {
    return loss_node_info;
  }
  auto prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  // return -> cast
  if (prim->name() == CAST && !pre_cnode->has_user_data<OperatorInfo>()) {
    pre_cnode = pre_cnode->input(1)->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(pre_cnode);
  }
  pre_cnode = HandleDependLoss(pre_cnode);
  auto current_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  // note: the GetNext op has no input
  if (INVALID_LOSS_OPS.find(current_prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(INFO) << "The loss is: " << current_prim->name();
    loss_node_info.loss_node = pre_cnode;
    return loss_node_info;
  }
  // the size of a common cnode is larger than 1
  if (pre_cnode->size() < 2) {
    MS_LOG(EXCEPTION) << pre_cnode->ToString() << " size( " << pre_cnode->inputs().size() << " ) is smaller than 2";
  }
  // return -> tuple_getitem -> loss
  if (current_prim->name() == TUPLE_GETITEM) {
    auto tuple_index = GetTupleGetItemIndex(pre_cnode);
    AnfNodePtr pre_pre_node = pre_cnode->input(1);
    MS_EXCEPTION_IF_NULL(pre_pre_node);
    auto pre_pre_cnode = pre_pre_node->cast<CNodePtr>();
    loss_node_info.has_tuple_getitem = true;
    loss_node_info.dout_index = tuple_index;
    loss_node_info.loss_node = pre_pre_cnode;
    return loss_node_info;
  }
  // return -> make_tuple
  if (current_prim->name() == MAKE_TUPLE) {
    MS_LOG(WARNING) << "The loss contains make_tuple, which is not supported";
    return loss_node_info;
  }
  // return -> loss
  loss_node_info.loss_node = pre_cnode;
  MS_LOG(DEBUG) << "The loss name is " << current_prim->name();
  return loss_node_info;
}

TensorLayouts GetLossNodeGradOutputLayout(const LossNodeInfo &node_info) {
  TensorLayouts ret;
  auto loss_cnode = node_info.loss_node;
  MS_EXCEPTION_IF_NULL(loss_cnode);
  ValueNodePtr prim_anf_node = loss_cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(prim_anf_node);
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(prim);
  if (INVALID_LOSS_OPS.find(prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(WARNING) << "The loss name is: " << prim->name() << ", do nothing for split sens now";
    return ret;
  }
  OperatorInfoPtr operator_info = loss_cnode->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(operator_info);
  TensorInfo loss_grad_tensor_info;
  size_t op_output_size = operator_info->outputs_tensor_info().size();
  MS_LOG(INFO) << "The loss name is " << operator_info->name() << ", has_tuple_getitem is "
               << node_info.has_tuple_getitem << ", the output size is " << op_output_size << ", the dout_index is "
               << node_info.dout_index;
  if ((op_output_size == 0) || (op_output_size <= IntToSize(node_info.dout_index))) {
    MS_LOG(EXCEPTION) << "The index is " << node_info.dout_index << ", but the size of outputs is " << op_output_size;
  }
  if (!node_info.has_tuple_getitem && (op_output_size > 1)) {
    MS_LOG(EXCEPTION) << "Currently, it is not supported that the sens is a tuple.";
  }
  loss_grad_tensor_info = operator_info->outputs_tensor_info()[IntToSize(node_info.dout_index)];
  ret.push_back(loss_grad_tensor_info.tensor_layout());
  return ret;
}
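
// Split the sens (gradient seed) input according to the loss layout: scalar
// sens is left whole, a Parameter sens gets a sliced abstract, a CNode sens
// gets redistribution ops, and a const Tensor sens is cut with _GetTensorSlice.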
void SplitSens(const CNodePtr &grad_sens_node, const TensorLayout &loss_grad_layout) {
  MS_EXCEPTION_IF_NULL(grad_sens_node);
  if (grad_sens_node->size() <= 1) {
    MS_LOG(EXCEPTION) << "The size of grad sens node is smaller than 2";
  }
  AnfNodePtr sens_tensor_node = grad_sens_node->input(1);
  MS_EXCEPTION_IF_NULL(sens_tensor_node);
  Shapes sens_shapes = GetNodeShape(sens_tensor_node);
  if (sens_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "GetNodeShape for sens_tensor_node: the output size is not 1";
  }
  // If the shape of the sens tensor is [] or [1], there is no need to split it.
  Shape sens_shape = sens_shapes[0];
  if (sens_shape.empty() || ((sens_shape.size() == 1) && (sens_shape[0] == 1))) {
    if (sens_tensor_node->isa<Parameter>()) {
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      sens_tensor_param->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(loss_grad_layout));
    }
    MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", no need to split sens";
    return;
  }
  auto loss_shape = loss_grad_layout.tensor_shape().array();
  if (loss_shape != sens_shape) {
    MS_LOG(EXCEPTION) << "The shape of sens is not equal to the loss output, which is unsupported now. Sens shape is "
                      << ShapeToString(sens_shape) << ", loss shape is " << ShapeToString(loss_shape);
  }
  MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", split it.";
  if (!IsValueNode<Tensor>(sens_tensor_node)) {
    if (sens_tensor_node->isa<Parameter>()) {
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      AbstractBasePtr abstract = sens_tensor_node->abstract();
      MS_EXCEPTION_IF_NULL(abstract);
      auto slice_shape = loss_grad_layout.slice_shape().array();
      std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
      MS_EXCEPTION_IF_NULL(parallel_shape);
      auto cloned_abstract = abstract->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(parallel_shape);
      sens_tensor_node->set_abstract(cloned_abstract);
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      sens_tensor_param->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(loss_grad_layout));
      return;
    }
    if (sens_tensor_node->isa<CNode>()) {
      auto op_list_ptr = InferSensRedistribution(sens_tensor_node, loss_grad_layout);
      if (op_list_ptr == nullptr) {
        return;
      }
      auto sens_tensor_cnode = sens_tensor_node->cast<CNodePtr>();
      auto func_graph = grad_sens_node->func_graph();
      MS_EXCEPTION_IF_NULL(func_graph);
      InsertRedistribution(op_list_ptr, grad_sens_node, func_graph, 1, sens_tensor_cnode);
      return;
    }
    MS_LOG(EXCEPTION) << "The sens node is not a Tensor, Parameter or CNode, which is unsupported now.";
  }
  // use the _GetTensorSlice operator to split the sens tensor
  FuncGraphPtr func_graph = grad_sens_node->func_graph();  // only a cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(loss_grad_layout);
  InsertGetTensorSliceOp(op, grad_sens_node, func_graph, 1, SPLIT_SENS);
}

void InsertForwardOps(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorVector forward_op = distribute_operator->forward_op();
  if (!forward_op.empty()) {
    MS_LOG(INFO) << "Insert forward op for " << distribute_operator->name();
    ForwardCommunication(forward_op, cnode);
  }
}

void StepReplace(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  // StepReplaceOp
  OperatorVector replace_op = distribute_operator->replace_op();
  if (!replace_op.empty()) {
    MS_LOG(INFO) << "StepReplaceOp " << cnode->ToString();
    StepReplaceOp(replace_op, cnode);
  }
  // StepReplaceGraph: after calling StepReplaceGraph, the cnode can not be used anymore.
  ReplaceGraphPtr replace_graph = distribute_operator->replace_graph(cnode);
  if (!replace_op.empty() && replace_graph) {
    MS_LOG(EXCEPTION) << "Only one of replace_op or replace_graph can be used";
  }
  if (replace_graph) {
    MS_LOG(INFO) << "StepReplaceGraph " << cnode->ToString();
    StepReplaceGraph(replace_graph, cnode);
  }
}

void HandleDropoutNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  std::string op_name = distribute_operator->name();
  if (op_name.find(DROPOUT_DO_MASK) == std::string::npos) {
    return;
  }
  DropoutDoMaskInfoPtr dropout_do_mask = std::dynamic_pointer_cast<DropoutDoMaskInfo>(distribute_operator);
  MS_EXCEPTION_IF_NULL(dropout_do_mask);
  std::vector<Operator> replace_op = dropout_do_mask->GetDropoutGenMaskReplaceOp(cnode);
  if (replace_op.empty()) {
    MS_LOG(DEBUG) << "No need to replace dropout_gen_mask";
    return;
  }
  if (cnode->inputs().size() != DROPOUT_DO_MASK_CNODE_INPUT_SIZE) {
    MS_LOG(EXCEPTION) << "The size of the dropout_do_mask cnode's input is not " << DROPOUT_DO_MASK_CNODE_INPUT_SIZE;
  }
  ReplaceOneOp(replace_op[0], cnode->input(DROPOUT_GEN_MASK_INDEX)->cast<CNodePtr>());
}

void HandleTileNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  if (cnode->size() < 3 || !IsValueNode<Primitive>(cnode->input(0))) {
    return;
  }
  auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
  if (prim->name() != TILE) {
    return;
  }
  TileInfoPtr tile = std::dynamic_pointer_cast<TileInfo>(distribute_operator);
  MS_EXCEPTION_IF_NULL(tile);
  tile->UpdateMultiples(cnode);
}

void HandleSpecialNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  HandleDropoutNode(distribute_operator, cnode);
  HandleTileNode(distribute_operator, cnode);
}

std::set<FuncGraphPtr> FindForwardGraphByRootNodes(const AnfNodeSet &root_all_nodes) {
  // J->CNode->Graph
  std::set<FuncGraphPtr> graph_set;
  for (auto &node : root_all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if ((cnode->size() < 2) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    auto expect_j_prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    if (expect_j_prim->name() != J) {
      continue;
    }
    if (IsValueNode<FuncGraph>(cnode->input(1))) {
      auto graph = GetValueNode<FuncGraphPtr>(cnode->input(1));
      MS_LOG(DEBUG) << "Find the forward graph success";
      graph_set.insert(graph);
      auto manager = graph->manager();
      MS_EXCEPTION_IF_NULL(manager);
      auto graph_used = manager->func_graphs_used_total(graph);
      for (auto &sub_graph : graph_used) {
        graph_set.insert(sub_graph);
      }
    }
  }
  return graph_set;
}

void StepSplitSens(const std::pair<CNodePtr, LossNodeInfo> &sens_loss_pair) {
  CNodePtr sens_node = sens_loss_pair.first;
  auto loss_node = sens_loss_pair.second;
  auto loss_grad_layout = GetLossNodeGradOutputLayout(loss_node);
  if (!loss_grad_layout.empty()) {
    SplitSens(sens_node, loss_grad_layout[0]);
  }
}

// A sens node satisfies the following conditions: cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
std::vector<std::pair<CNodePtr, LossNodeInfo>> GetSensLossPairs(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  std::vector<std::pair<CNodePtr, LossNodeInfo>> sens_loss_pairs;
  for (auto &node : root->nodes()) {
    if (!node->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)
    auto sens_cnode = node->cast<CNodePtr>();
    AnfNodePtr expect_tuple_getitem = sens_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_tuple_getitem);
    if (!expect_tuple_getitem->isa<CNode>()) {
      continue;
    }
    auto expect_tuple_getitem_cnode = expect_tuple_getitem->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_tuple_getitem_cnode, TUPLE_GETITEM)) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode
    AnfNodePtr expect_anonymous = expect_tuple_getitem_cnode->input(1);
    MS_EXCEPTION_IF_NULL(expect_anonymous);
    if (!expect_anonymous->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
    auto expect_anonymous_cnode = expect_anonymous->cast<CNodePtr>();
    AnfNodePtr expect_j = expect_anonymous_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_j);
    if (!expect_j->isa<CNode>()) {
      continue;
    }
    auto expect_j_cnode = expect_j->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_j_cnode, J)) {
      continue;
    }
    if (!IsValueNode<FuncGraph>(expect_j_cnode->input(1))) {
      MS_LOG(EXCEPTION) << "Sens can't find the corresponding graph.";
    }
    auto func_graph = GetValueNode<FuncGraphPtr>(expect_j_cnode->input(1));
    auto loss_node_info = FindLossCNode(func_graph);
    if (loss_node_info.loss_node == nullptr) {
      MS_LOG(WARNING) << "Cannot find the loss cnode";
      continue;
    }
    std::pair<CNodePtr, LossNodeInfo> sens_loss_pair = std::make_pair(sens_cnode, loss_node_info);
    sens_loss_pairs.push_back(sens_loss_pair);
  }
  return sens_loss_pairs;
}
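
// The main insertion pass: split sens first, then for every parallel-care
// cnode insert forward, redistribution and (if a backward pass exists)
// backward communication ops, and finally apply the replace-op/replace-graph
// rewrites in a second sweep.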
void ParallelCommunication(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes,
                           const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(manager);
  TensorRedistribution tensor_redistribution;
  std::vector<std::pair<CNodePtr, LossNodeInfo>> sens_loss_pairs = GetSensLossPairs(root);
  bool has_backward = !sens_loss_pairs.empty();
  // sens must be split before inserting the operators.
  for (auto &pair : sens_loss_pairs) {
    // If the shape of the grad-sens tensor is not [] or [1], use get tensor slice to handle it.
    // If the type of the sens node is not Tensor, it is unsupported now; do nothing by default.
    StepSplitSens(pair);
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      // make_tuple is a parallel care node, but it may not have operator info
      if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>()) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      MS_EXCEPTION_IF_NULL(distribute_operator);
      // insert forward ops
      InsertForwardOps(distribute_operator, cnode);
      // insert redistribution ops
      StepRedistribution(cnode, distribute_operator, cnode, tensor_redistribution, cnode);
      // insert backward ops
      if (has_backward) {
        BackwardCommunication(distribute_operator, cnode, sens_loss_pairs);
      }
      HandleSpecialNode(distribute_operator, cnode);
    } else if (IsValueNode<Tensor>(node)) {
      StepSplitTensor(node, manager);
    }
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>()) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      MS_EXCEPTION_IF_NULL(distribute_operator);
      // StepReplace
      StepReplace(distribute_operator, cnode);
    }
  }
}

namespace {
void RevertSymbolicKeyInstance(const FuncGraphPtr &root, const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(node);
  auto symbolic_key = GetValueNode<SymbolicKeyInstancePtr>(node);
  MS_EXCEPTION_IF_NULL(symbolic_key);
  auto all_upstream_node = root->manager()->node_users()[node];
  for (auto &upstream_node : all_upstream_node) {
    FuncGraphPtr fg = upstream_node.first->func_graph();
    if (symbolic_key->node()->isa<Parameter>()) {
      for (auto &param : root->parameters()) {
        if (*param == *symbolic_key->node()) {
          AnfNodePtr reverted_node = root->NewCNode({NewValueNode(prim::kPrimEmbed), param});
          MS_EXCEPTION_IF_NULL(reverted_node);
          MS_LOG(DEBUG) << "before replacing " << node->ToString() << " with node " << reverted_node->DebugString();
          (void)fg->manager()->Replace(node, reverted_node);
          MS_LOG(DEBUG) << "reverted node " << node->ToString() << " to node " << reverted_node->DebugString();
        }
      }
    }
  }
}
}  // namespace

void HandleSymbolicKeyInstance(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &node : all_nodes) {
    // revert SymbolicKeyInstance back to the embed() primitive
    if (IsValueNode<SymbolicKeyInstance>(node)) {
      RevertSymbolicKeyInstance(root, node);
      continue;
    }
  }
}
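
// Return (parameter name, input index) pairs for the trainable parameters
// feeding a cnode, looking through a single Cast. Callers use the first pair
// to build the strategy-checkpoint key, e.g. "<prim name>_<param name>".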
std::vector<std::pair<std::string, int>> NodeParameterName(const CNodePtr &node) {
  std::vector<AnfNodePtr> node_inputs{node->inputs()};
  std::vector<std::pair<std::string, int>> param_names;
  for (int i = 0; i < UintToInt(node_inputs.size()); ++i) {
    auto input = node_inputs[i];
    if (input->isa<Parameter>()) {
      auto input_parameter = input->cast<ParameterPtr>();
      if (input_parameter->has_default() && ParameterRequireGrad(input_parameter)) {
        param_names.push_back({input_parameter->name(), i});
      }
    } else if (input->isa<CNode>()) {
      CNodePtr cnode = input->cast<CNodePtr>();
      if (!IsValueNode<Primitive>(cnode->input(0))) {
        return param_names;
      }
      ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
      PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
      // accessing inputs()[1] below requires at least two inputs (the primitive plus one argument)
      if (prim->name() == CAST && cnode->inputs().size() >= 2) {
        auto cast_input = cnode->inputs()[1];
        if (cast_input->isa<Parameter>()) {
          auto cast_input_parameter = cast_input->cast<ParameterPtr>();
          if (cast_input_parameter->has_default() && ParameterRequireGrad(cast_input_parameter)) {
            param_names.push_back({cast_input_parameter->name(), i});
          }
        }
      }
    }
  }
  return param_names;
}
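// Record the strategy, tensor layout and (for manually split gather/embedding-lookup ops)
// the manual split shapes of every parameter-related operator, then save them through the
// strategy checkpoint; the strategy key is "<prim name>_<param name>".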
void CheckpointStrategy(const std::vector<AnfNodePtr> &all_nodes) {
  StrategyMap stra_map;
  TensorInfoMap tensor_info_map;
  ManualShapeMap manual_shape_map;
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    auto param_names = NodeParameterName(cnode);
    if (param_names.empty()) {
      continue;
    }
    string param_name = param_names[0].first;
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->user_data<OperatorInfo>();
    if (operator_info) {
      if (operator_info->name().find(RESHAPEINFO) != std::string::npos) {
        continue;
      }
      std::vector<TensorInfo> input_tensor_info = operator_info->inputs_tensor_info();
      std::string strategy_key_name = prim->name() + "_" + param_name;
      stra_map[strategy_key_name] = operator_info->strategy();
      for (auto param_name_pair : param_names) {
        if (param_name_pair.second - 1 >= UintToInt(input_tensor_info.size())) {
          continue;
        }
        tensor_info_map[param_name_pair.first] = input_tensor_info[param_name_pair.second - 1];
      }
      if (operator_info->name().find(EMBEDDING_LOOKUP) != std::string::npos ||
          operator_info->name().find(GATHERV2) != std::string::npos) {
        auto gatherv2_info = std::dynamic_pointer_cast<GatherV2PInfo>(operator_info);
        auto param_split_shapes = gatherv2_info->param_split_shapes();
        auto index_offsets = gatherv2_info->index_offsets();
        if (param_split_shapes.size() != index_offsets.size()) {
          MS_LOG(EXCEPTION) << "In manual split, the lengths of param_split_shapes and index_offsets must be equal.";
        }
        std::vector<std::pair<int32_t, int32_t>> manual_shape;
        for (int i = 0; i < UintToInt(param_split_shapes.size()); ++i) {
          manual_shape.push_back({param_split_shapes[i], index_offsets[i]});
        }
        manual_shape_map[param_name] = manual_shape;
      }
    }
  }
  if (StrategyCheckpoint::GetInstance().Save(stra_map, tensor_info_map, &manual_shape_map) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Save strategy checkpoint failed";
  }
}
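// Mark every primitive CNode in the collection as part of the forward graph; the two
// overloads below differ only in the container type they accept.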
void SetForwardFlag(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    MS_LOG(DEBUG) << "Set forward flag " << cnode->DebugString() << ".";
    cnode->set_in_forward_flag(true);
  }
}

void SetForwardFlag(const AnfNodeSet &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    cnode->set_in_forward_flag(true);
  }
}
std::set<FuncGraphPtr> ForwardGraph(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  const auto &all_nodes = root->nodes();
  std::set<FuncGraphPtr> graph_set = FindForwardGraphByRootNodes(all_nodes);
  return graph_set;
}
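// Locate the loss CNode of the given graph in all_nodes (matched by UniqueIdThroughCopy)
// and return the forward nodes reachable from it; empty if no loss node is found.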
std::vector<AnfNodePtr> FindRootForwardCNode(const FuncGraphPtr &graph, const AnfNodeSet &all_nodes) {
  MS_EXCEPTION_IF_NULL(graph);
  std::vector<AnfNodePtr> root_forward_nodes;
  auto loss_cnode = FindLossCNode(graph).loss_node;
  if (loss_cnode == nullptr) {
    MS_LOG(WARNING) << "Cannot find the loss cnode";
    return root_forward_nodes;
  }
  auto loss_cnode_id = loss_cnode->UniqueIdThroughCopy();
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    auto root_node_id = node->UniqueIdThroughCopy();
    if (loss_cnode_id == root_node_id) {
      root_forward_nodes = DeepLinkedGraphSearch(cnode);
      break;
    }
  }
  return root_forward_nodes;
}
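// Insert a Shape op taking pre_node as input at input index 2 of node, so the reshape
// target comes from pre_node's runtime shape; a constant empty shape is left untouched.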
void InsertShapeOp(const CNodePtr &node, const AnfNodePtr &pre_node, const FuncGraphPtr &root) {
  // the shape op has neither params nor attrs
  OperatorParams params;
  OperatorAttrs attrs;
  auto shape_value = GetValueNode(node->input(2))->cast<ValueSequeuePtr>();
  MS_EXCEPTION_IF_NULL(shape_value);
  auto shape = shape_value->value();
  if (shape.empty()) {
    return;
  }
  OperatorArgs args = std::make_pair(attrs, params);
  Operator op = std::make_pair(SHAPE_OP, args);
  InsertNode(op, node, 2, pre_node, root, "shape");
}
void HandleRootReshape(const std::vector<AnfNodePtr> &all_nodes) {
  // If the root graph has a reshape op, find the corresponding parameter:
  // the reshape's target shape is the shape of that parameter.
  for (auto &node : all_nodes) {
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0)) || cnode->in_forward_flag()) {
      continue;
    }
    auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    if (prim->name() != RESHAPE) {
      continue;
    }
    auto root = node->func_graph();
    auto all_dfs_nodes = DeepLinkedGraphSearch(node);
    for (auto r_iter = all_dfs_nodes.rbegin(); r_iter != all_dfs_nodes.rend(); ++r_iter) {
      if ((*r_iter)->isa<Parameter>()) {
        InsertShapeOp(cnode, *r_iter, root);
        break;
      }
    }
  }
}
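// Mark the forward CNodes: if no forward sub-graph is found, flag every op in the root
// graph; otherwise flag each forward sub-graph's nodes plus the forward nodes in root.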
void MarkForwardCNode(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto all_nodes = root->nodes();
  auto graph_set = FindForwardGraphByRootNodes(all_nodes);
  if (graph_set.empty()) {
    MS_LOG(INFO) << "Cannot find the forward graph, so mark the ops in the root graph";
    SetForwardFlag(all_nodes);
  } else {
    for (auto &func_graph : graph_set) {
      MS_LOG(INFO) << "The sub graph size of root is " << root->func_graphs_used().size();
      auto return_node = func_graph->get_return();
      MS_EXCEPTION_IF_NULL(return_node);
      auto all_dfs_nodes = DeepLinkedGraphSearch(return_node);
      SetForwardFlag(all_dfs_nodes);
      auto root_forward_nodes = FindRootForwardCNode(func_graph, all_nodes);
      if (root_forward_nodes.empty()) {
        continue;
      }
      // Mark the forward flag for the nodes in the root graph.
      SetForwardFlag(root_forward_nodes);
    }
  }
}
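// Initialize the parallel context: pick the world group and communication backend by device
// target, validate the device and stage numbers, query rank size/rank id from the
// communication framework when unset, and initialize the device manager.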
Status ParallelInit() {
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  int32_t device_num = ParallelContext::GetInstance()->device_num();
  int32_t global_rank = ParallelContext::GetInstance()->global_rank();
  int32_t split_stage_num = ParallelContext::GetInstance()->pipeline_stage_split_num();
  std::vector<int32_t> stages = ParallelContext::GetInstance()->stage();
  std::string parallel_mode = ParallelContext::GetInstance()->parallel_mode();
  auto ms_context = MsContext::GetInstance();
  MS_EXCEPTION_IF_NULL(ms_context);
  std::string backend = ms_context->get_param<std::string>(MS_CTX_DEVICE_TARGET);
  std::string world_group;
  std::string communication_backend;
  if (backend == kAscendDevice || backend == kDavinciDevice) {
    world_group = HCCL_WORLD_GROUP;
    communication_backend = HCCL_BACKEND;
  } else if (backend == kGPUDevice) {
    world_group = NCCL_WORLD_GROUP;
    communication_backend = NCCL_BACKEND;
  } else {
    MS_LOG(EXCEPTION) << "Invalid communication backend: " << backend;
  }
  if (device_num <= 0) {
    MS_LOG(ERROR) << "Invalid device num " << device_num << ", expected a positive device number";
    return FAILED;
  }
  if (split_stage_num > 0) {
    if (device_num % split_stage_num != 0) {
      MS_LOG(ERROR) << "Device num " << device_num << " can't be divided by stage num " << split_stage_num
                    << ", as only exact division is supported now";
      return FAILED;
    }
    for (int i = 0; i < split_stage_num; i++) {
      stages.push_back(device_num / split_stage_num);
    }
  } else if (split_stage_num < 0) {
    MS_LOG(ERROR) << "Invalid stage num " << split_stage_num << ", expected a positive stage number";
    return FAILED;
  }
  ParallelContext::GetInstance()->set_stage(stages);
  uint32_t world_rank_size = 0;
  if (!ParallelContext::GetInstance()->device_num_is_set()) {
    if (!CommManager::GetInstance().GetRankSize(world_group, &world_rank_size)) {
      MS_LOG(EXCEPTION) << "Get rank size failed";
    }
    device_num = UintToInt(world_rank_size);
    MS_LOG(INFO) << "Get device num from the communication framework, the device num is " << device_num;
  }
  uint32_t rank_id = 0;
  if (!ParallelContext::GetInstance()->global_rank_is_set()) {
    if (!CommManager::GetInstance().GetRankID(world_group, &rank_id)) {
      MS_LOG(EXCEPTION) << "Get rank id failed";
    }
    global_rank = UintToInt(rank_id);
    MS_LOG(INFO) << "Get global rank from the communication framework, the global rank is " << global_rank;
  }
  if (!stages.empty() && parallel_mode != SEMI_AUTO_PARALLEL) {
    MS_LOG(ERROR) << "To enable pipeline parallel, please set the parallel mode to " << SEMI_AUTO_PARALLEL;
    return FAILED;
  }
  if (!InitDevice(device_num, global_rank, communication_backend, stages)) {
    MS_LOG(ERROR) << "Init device failed";
    return FAILED;
  }
  MS_LOG(INFO) << "The parallel context: dev num: " << device_num << ", global rank: " << global_rank
               << ", backend: " << backend << ", gradients_mean: " << ParallelContext::GetInstance()->gradients_mean()
               << ", gradient_fp32_sync: " << ParallelContext::GetInstance()->gradient_fp32_sync();
  return SUCCESS;
}
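// Propagate operator info to forward make_tuple/make_list nodes: such a node must have
// exactly one user, and if that user is a parallel-care op with a single input, the
// make_tuple/make_list inherits the user's operator info.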
void HandleForwardMakeTupleAndMakeList(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    if (!AnfNodeIsPrimitive(node, MAKE_TUPLE) && !AnfNodeIsPrimitive(node, MAKE_LIST)) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!cnode->in_forward_flag()) {
      continue;
    }
    FuncGraphManagerPtr manager = cnode->func_graph()->manager();
    MS_EXCEPTION_IF_NULL(manager);
    std::string op_type = AnfNodeIsPrimitive(node, MAKE_TUPLE) ? MAKE_TUPLE : MAKE_LIST;
    auto make_tuple_list_user = manager->node_users()[cnode];
    if (make_tuple_list_user.size() != 1) {
      MS_LOG(EXCEPTION) << "Now the " << op_type << " must have exactly 1 user, but got "
                        << make_tuple_list_user.size();
    }
    CNodePtr make_tuple_list_next_cnode = make_tuple_list_user.pop().first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(make_tuple_list_next_cnode);
    std::string make_tuple_list_user_prim_name = GetPrimName(make_tuple_list_next_cnode);
    if (!IsParallelCareNode(make_tuple_list_next_cnode)) {
      MS_LOG(INFO) << "The " << op_type << "'s user is " << make_tuple_list_user_prim_name
                   << ", no need to set operator info";
      continue;
    }
    if (make_tuple_list_next_cnode->inputs().size() != 2) {
      MS_LOG(EXCEPTION) << "Now the " << op_type << "'s user only supports 1 input, but got "
                        << make_tuple_list_next_cnode->inputs().size() - 1;
    }
    MS_LOG(INFO) << "Set the " << op_type << "'s operator info, and the op name is "
                 << make_tuple_list_user_prim_name;
    OperatorInfoPtr op_info = GetDistributeOperator(make_tuple_list_next_cnode);
    MS_EXCEPTION_IF_NULL(op_info);
    cnode->set_user_data<OperatorInfo>(op_info);
  }
}
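// If the node is a CNode with RefKey inputs, return the CNode paired with those RefKeys;
// otherwise return {nullptr, {}}.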
RefKeyPair CNodeWithRefKeys(const AnfNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  std::vector<AnfNodePtr> refkeys;
  if (cnode->isa<CNode>()) {
    auto cnode_ptr = cnode->cast<CNodePtr>();
    auto inputs = cnode_ptr->inputs();
    for (auto &one_input : inputs) {
      if (IsValueNode<RefKey>(one_input)) {
        refkeys.push_back(one_input);
      }
    }
    if (!refkeys.empty()) {
      return std::make_pair(cnode, refkeys);
    }
  }
  return {nullptr, refkeys};
}
ParameterUsersInfo FindParameterNodeUsers(const AnfNodePtr &node, bool (*IsCareNode)(const CNodePtr &)) {
  // In this case, node is a Parameter
  ParameterUsersInfo parameter_user_info;
  MS_EXCEPTION_IF_NULL(node->func_graph());
  MS_EXCEPTION_IF_NULL(node->func_graph()->manager());
  auto candidate_set = node->func_graph()->manager()->node_users()[node];
  for (auto &candidate : candidate_set) {
    auto candidate_node = candidate.first;
    auto c = candidate_node->cast<CNodePtr>();
    if (c == nullptr || !c->has_user_data<OperatorInfo>()) {
      continue;
    }
    (void)parameter_user_info.second.second.insert(candidate);
  }
  parameter_user_info.first = node->cast<ParameterPtr>()->name();
  parameter_user_info.second.first = node;
  return parameter_user_info;
}
ParameterUsersInfo FindRefKeyNodeUsers(const RefKeyPair &ref_key_pair, bool (*IsCareNode)(const CNodePtr &)) {
  // Dealing with the RefKey case
  ParameterUsersInfo parameter_user_info;
  auto refkeys = ref_key_pair.second;
  auto cnode = ref_key_pair.first;
  auto cnode_ptr = cnode->cast<CNodePtr>();
  if ((cnode_ptr == nullptr) || !IsValueNode<Primitive>(cnode_ptr->input(0)) || !IsCareNode(cnode_ptr)) {
    return parameter_user_info;
  }
  if (refkeys.size() > 1) {
    MS_LOG(EXCEPTION) << "CNode: " << cnode->fullname_with_scope() << "'s inputs have more than 1 RefKeys";
  }
  MS_EXCEPTION_IF_NULL(cnode->func_graph());
  auto cnode_func_graph = cnode->func_graph();
  MS_EXCEPTION_IF_NULL(cnode->func_graph()->manager());
  // Collect the care-node users of the RefKey itself
  auto candidate_set_by_refkey = cnode_func_graph->manager()->node_users()[refkeys[0]];
  for (auto &candidate : candidate_set_by_refkey) {
    auto candidate_node = candidate.first;
    auto c = candidate_node->cast<CNodePtr>();
    if ((c == nullptr) || !IsValueNode<Primitive>(c->input(0)) || !IsCareNode(c)) {
      continue;
    }
    (void)parameter_user_info.second.second.insert(candidate);
  }
  // Find the corresponding Parameter and collect its care-node users as well
  std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(refkeys[0], cnode_func_graph);
  if (parameters.size() != 1) {
    MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
  }
  parameter_user_info.first = parameters[0]->cast<ParameterPtr>()->name();
  parameter_user_info.second.first = parameters[0];
  auto candidate_set_by_para = cnode_func_graph->manager()->node_users()[parameters[0]];
  for (auto &candidate : candidate_set_by_para) {
    auto candidate_node = candidate.first;
    auto c = candidate_node->cast<CNodePtr>();
    if ((c == nullptr) || !IsValueNode<Primitive>(c->input(0)) || !IsCareNode(c)) {
      continue;
    }
    (void)parameter_user_info.second.second.insert(candidate);
  }
  return parameter_user_info;
}
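// Dispatch on the node kind: a CNode holding RefKeys or a plain Parameter, collecting the
// parallel-care users in either case.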
ParameterUsersInfo FindParameterUsers(const AnfNodePtr &node, bool (*IsCareNode)(const CNodePtr &)) {
  ParameterUsersInfo parameter_users_info;
  auto cnode_with_refkeys = CNodeWithRefKeys(node);
  if (cnode_with_refkeys.first != nullptr) {
    // the node is a ref key node
    return FindRefKeyNodeUsers(cnode_with_refkeys, IsCareNode);
  } else if (node->isa<Parameter>()) {
    // the node is a parameter node
    return FindParameterNodeUsers(node, IsCareNode);
  }
  return parameter_users_info;
}
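// Return the slice shape that the user at (cnode, input index) expects for a parameter,
// taken from the user's operator info (input index 0 is the primitive, hence the -1).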
Shape ParameterSliceShape(const std::pair<AnfNodePtr, int> &param_info) {
  auto user_cnode = param_info.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(user_cnode);
  auto user_input_index = param_info.second;
  OperatorInfoPtr op_info = user_cnode->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(op_info);
  size_t input_tensor_info_size = op_info->inputs_tensor_info().size();
  if (SizeToInt(input_tensor_info_size) <= user_input_index - 1) {
    MS_LOG(EXCEPTION) << op_info->name() << ": the size of inputs tensor info is " << input_tensor_info_size
                      << ", but the index is " << user_input_index - 1;
  }
  TensorInfo tensor_info = op_info->inputs_tensor_info()[user_input_index - 1];
  MS_LOG(DEBUG) << "The op name is " << op_info->name() << ", the parameter index is " << user_input_index - 1
                << ", the slice shape is " << ShapeToString(tensor_info.slice_shape()) << ", the origin shape is "
                << ShapeToString(tensor_info.shape());
  return tensor_info.slice_shape();
}
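// A parameter with multiple parallel-care users must get the same slice shape from each of
// them; differing slice shapes mean inconsistent split strategies and raise an exception.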
void CheckParameterSplit(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    ParameterUsersInfo parameter_users_info = FindParameterUsers(node, IsParallelCareNode);
    auto users_set = parameter_users_info.second.second;
    if (users_set.size() <= 1) {
      continue;
    }
    auto parameter_name = parameter_users_info.first;
    MS_LOG(INFO) << "The parameter: " << parameter_name << " has " << users_set.size() << " users";
    auto first_user = users_set.pop();
    Shape first_user_slice_shape = ParameterSliceShape(first_user);
    for (auto &user : users_set) {
      Shape user_slice_shape = ParameterSliceShape(user);
      if (first_user_slice_shape != user_slice_shape) {
        MS_LOG(EXCEPTION) << "The parameter: " << parameter_name
                          << " has multiple users, but the split strategies are different";
      }
    }
  }
}
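// Entry point of the step-parallel pass. It is skipped unless the root graph carries the
// AUTO_PARALLEL flag and the mode is (semi_)auto_parallel; otherwise it initializes the
// parallel context, extracts shapes and strategies, validates parameter splits, and inserts
// the forward/backward communication and redistribution operators.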
bool StepParallel(const FuncGraphPtr &root, const opt::OptimizerPtr &optimizer) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(optimizer);
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  std::string parallel_mode = ParallelContext::GetInstance()->parallel_mode();
  // assume no change to the graph
  bool changes = false;
  // control whether to use the model_parallel mode
  if (!root->has_flag(AUTO_PARALLEL) || ((parallel_mode != AUTO_PARALLEL) && (parallel_mode != SEMI_AUTO_PARALLEL)) ||
      (root->has_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY))) {
    if (!root->has_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY)) {
      if (HasStrategy(root)) {
        MS_LOG(INFO) << "Strategies ignored in " << parallel_mode
                     << ", set_strategy() is only valid in [semi_]auto_parallel.";
      }
      root->set_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY, true);
    }
    return changes;
  }
  struct timeval start_time, end_time;
  (void)gettimeofday(&start_time, nullptr);
  MS_LOG(INFO) << "Now entering step parallel";
  DumpGraph(root, std::string(STEP_PARALLEL_BEGIN));
  pipeline::ResourceBasePtr res = optimizer->resource();
  MS_EXCEPTION_IF_NULL(res);
  FuncGraphManagerPtr manager = res->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);
  std::reverse(all_nodes.begin(), all_nodes.end());
  if (parallel_mode != AUTO_PARALLEL) {
    TOTAL_OPS = 0;
    if (ParallelInit() != SUCCESS) {
      MS_LOG(EXCEPTION) << "Parallel init failed";
    }
    // mark the forward cnodes; the parallel pass only cares about these nodes
    MarkForwardCNode(root);
    if (FindCommunicationOp(all_nodes)) {
      MS_LOG(EXCEPTION) << "The graph already contains a communication op";
    }
    // extract shape and strategy, set operator_info
    ExtractInformation(all_nodes);
    ReshapeInit(all_nodes);
  }
  HandleRootReshape(all_nodes);
  HandleForwardMakeTupleAndMakeList(all_nodes);
  // if an input or parameter has multiple users, check whether its split strategies are consistent
  CheckParameterSplit(all_nodes);
  // save the strategies as a checkpoint for multi-train
  if (StrategyCheckpoint::GetInstance().SaveCheckPointOn()) {
    CheckpointStrategy(all_nodes);
  }
  HandleSymbolicKeyInstance(root, all_nodes);
  // cover the parallel shape
  CoverSliceShape(root);
  // set the shape for the optimizer's cloned tensors
  SetClonedTensorShapeForOptimizer(root);
  // ForwardCommunication, BackwardCommunication and TensorRedistribution
  ParallelCommunication(root, all_nodes, manager);
  DumpGraph(root, std::string(STEP_PARALLEL_END));
  // step parallel only runs once
  root->set_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY, true);
  res->results()[pipeline::kStepParallelGraph] = root;
  // in auto parallel mode, there is no need to check whether strategies are set
  root->set_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY, true);
  (void)gettimeofday(&end_time, nullptr);
  uint64_t time = kUSecondInSecond * static_cast<uint64_t>(end_time.tv_sec - start_time.tv_sec);
  time += static_cast<uint64_t>(end_time.tv_usec - start_time.tv_usec);
  MS_LOG(INFO) << "Now leaving step parallel, used time: " << time << " us";
  return changes;
}
// Needed by rec_parser: record the node's own UniqueId followed by the UniqueIds of its inputs.
std::vector<std::string> ExtractInputsTensorName(const CNodePtr &node) {
  std::vector<std::string> name_inputs;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  std::vector<AnfNodePtr> node_inputs{all_inputs.begin() + 1, all_inputs.end()};
  std::string node_id = node->UniqueId();
  name_inputs.push_back(node_id);
  for (auto &input : node_inputs) {
    std::string name = input->UniqueId();
    name_inputs.push_back(name);
  }
  return name_inputs;
}
}  // namespace parallel
}  // namespace mindspore