
step_parallel.cc

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "frontend/parallel/step_parallel.h"
#include <inttypes.h>
#include <sys/time.h>
#include <algorithm>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <unordered_map>
#include <utility>
#include "frontend/operator/ops.h"
#include "frontend/optimizer/optimizer.h"
#include "frontend/parallel/auto_parallel/graph_costmodel.h"
#include "frontend/parallel/context.h"
#include "frontend/parallel/device_manager.h"
#include "frontend/parallel/dynamic_creator.h"
#include "frontend/parallel/graph_util/generate_graph.h"
#include "frontend/parallel/graph_util/graph_info.h"
#include "frontend/parallel/graph_util/node_info.h"
#include "frontend/parallel/node_check.h"
#include "frontend/parallel/ops_info/matmul_info.h"
#include "frontend/parallel/strategy_checkpoint/parallel_strategy_checkpoint.h"
#include "ir/param_info.h"
#include "ir/tensor.h"
#include "utils/comm_manager.h"
#include "utils/ms_context.h"
#include "utils/symbolic.h"
#include "mindspore/core/utils/parallel_node_check.h"
#if (ENABLE_CPU && (ENABLE_D || ENABLE_GPU))
#include "ps/util.h"
#endif

using mindspore::tensor::Tensor;

namespace mindspore {
namespace parallel {
static const std::set<std::string> COMMUNICATION_OPS = {ALL_REDUCE, ALL_GATHER, ALL_TO_ALL, REDUCE_SCATTER};
static const std::set<std::string> INVALID_LOSS_OPS = {GET_NEXT, VIRTUALLOSS};
// g_RefMap: if input i of CNode B is a RefKey[Parameter C], the map stores
// an entry with key C and value (B, i).
static std::map<AnfNodePtr, std::pair<AnfNodePtr, int64_t>> g_RefMap;
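
// If the primitive of the new node carries a 'group' attr (a hashed group
// name), look the hash up in the device manager and attach the corresponding
// rank list as the GROUP_RANKS attr.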
void SetCommunicationOpGroupLabel(std::vector<AnfNodePtr> new_node_input) {
  if (new_node_input.empty()) {
    return;
  }
  ValueNodePtr prim_anf_node = new_node_input[0]->cast<ValueNodePtr>();
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
  MS_EXCEPTION_IF_NULL(prim);
  auto attrs = prim->attrs();
  auto iter = attrs.find(GROUP);
  if (iter != attrs.end()) {
    auto value = iter->second;
    MS_EXCEPTION_IF_NULL(value);
    if (value->isa<StringImm>()) {
      std::string hash_name = value->cast<StringImmPtr>()->value();
      MS_EXCEPTION_IF_NULL(g_device_manager);
      std::string rank_list_name = g_device_manager->FindRankListNameByHashName(hash_name);
      (void)prim->AddAttr(GROUP_RANKS, MakeValue(rank_list_name));
    }
  }
}
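
// Build the input list for a new CNode that applies `op` to `node`:
// {value node of op, node}, with op's constant params spliced in at the
// positions recorded in the Operator pair.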
std::vector<AnfNodePtr> CreateInput(const Operator &op, const AnfNodePtr &node, const std::string &instance_name) {
  MS_EXCEPTION_IF_NULL(node);
  OperatorArgs arg_forward = op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_forward.first, op.first, instance_name);
  MS_EXCEPTION_IF_NULL(pyop_instance);
  OperatorParams params = arg_forward.second;
  std::vector<AnfNodePtr> new_node_input = {NewValueNode(pyop_instance), node};
  if (!params.empty()) {
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      MS_EXCEPTION_IF_NULL(val);
      int64_t position = param.second;
      (void)new_node_input.insert(new_node_input.begin() + position, val);
    }
  }
  // if the op has a 'group' attr, set the rank list name for it
  SetCommunicationOpGroupLabel(new_node_input);
  return new_node_input;
}
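
// Splice op onto the edge (pre_node -> node): create op(pre_node) and rewire
// input `index` of node to the new CNode. Unless the instance name contains
// SPLIT_SENS, the new node is flagged as part of the forward graph.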
void InsertNode(const Operator &op, const CNodePtr &node, size_t index, const AnfNodePtr &pre_node,
                const FuncGraphPtr &func_graph, const std::string &instance_name) {
  // insert new node before the node
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  std::vector<AnfNodePtr> node_input = CreateInput(op, pre_node, instance_name);
  CNodePtr new_node = func_graph->NewCNode(node_input);
  MS_EXCEPTION_IF_NULL(new_node);
  if (instance_name.find(SPLIT_SENS) == std::string::npos) {
    new_node->set_in_forward_flag(true);  // mark forward flag
  }
  auto new_node_value = node_input[0]->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(new_node_value);
  PrimitivePtr new_node_prim = new_node_value->value()->cast<PrimitivePtr>();
  new_node_prim->set_instance_name(instance_name);
  new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
  new_node->set_scope(scope);
  node_input[0]->set_scope(scope);
  manager->SetEdge(node, SizeToLong(index), new_node);
  MS_LOG(INFO) << "Insert " << instance_name << " success";
}

bool ParameterIsCloned(const AnfNodePtr &parameter_node) {
  MS_EXCEPTION_IF_NULL(parameter_node);
  auto cloned_parameter = parameter_node->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(cloned_parameter);
  // find the clone parameter
  if (!cloned_parameter->has_default()) {
    return false;
  }
  auto param_value = cloned_parameter->param_info();
  if (param_value == nullptr) {
    return false;
  }
  bool cloned = param_value->cloned();
  if (!cloned) {
    return false;
  }
  MS_LOG(INFO) << "The parameter: " << cloned_parameter->name() << " is cloned";
  return true;
}
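
// Like CreateInput, but for mirror ops under gradient accumulation: when
// grad_accumulation_step > 1, locate the LOCAL_STEP parameter and the cloned
// accu_grads parameter of this weight and pass them as extra inputs; if
// either is missing, fall back from MIRROR_MINI_STEP_OPERATOR to the plain
// MIRROR_OPERATOR.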
std::vector<AnfNodePtr> CreateMirrorInput(const FuncGraphPtr &root, const Operator &op, const AnfNodePtr &node,
                                          const std::string &instance_name, const std::string &weight_name) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(root->manager());
  AnfNodePtr local_step_param = nullptr;
  AnfNodePtr grad_accu = nullptr;
  std::string op_name = op.first;
  OperatorArgs arg_forward = op.second;
  int64_t grad_accumulation_step = ParallelContext::GetInstance()->grad_accumulation_step();
  if (grad_accumulation_step > 1) {
    bool find_local_step_node = false;
    auto parameters = root->parameters();
    for (auto &param : parameters) {
      auto param_ptr = param->cast<ParameterPtr>();
      MS_EXCEPTION_IF_NULL(param_ptr);
      if (param_ptr->name() == LOCAL_STEP) {
        auto param_users = root->manager()->node_users()[param];
        for (auto &user : param_users) {
          if (AnfNodeIsPrimitive(user.first, ASSIGN)) {
            find_local_step_node = true;
            local_step_param = user.first;
            MS_LOG(INFO) << "Found the local step node when creating the mirror; it may be in mini-step grad accumulation mode";
            break;
          }
        }
        break;
      }
    }
    bool find_grad_accu_node = false;
    for (auto &param : parameters) {
      if (!ParameterIsCloned(param)) {
        continue;
      }
      auto param_ptr = param->cast<ParameterPtr>();
      MS_EXCEPTION_IF_NULL(param_ptr);
      if (param_ptr->name().find(weight_name) != std::string::npos &&
          param_ptr->name().find(ACCU_GRADS) != std::string::npos) {
        find_grad_accu_node = true;
        grad_accu = param;
        MS_LOG(INFO) << "Found the accumulation grad node: " << param_ptr->name();
        break;
      }
    }
    if (op_name == MIRROR_MINI_STEP_OPERATOR) {
      if (!find_local_step_node || !find_grad_accu_node) {
        op_name = MIRROR_OPERATOR;
        arg_forward.first.pop_back();
      }
    }
  }
  ValuePtr pyop_instance = CreatOpInstance(arg_forward.first, op_name, instance_name);
  MS_EXCEPTION_IF_NULL(pyop_instance);
  OperatorParams params = arg_forward.second;
  std::vector<AnfNodePtr> new_node_input;
  if (op_name == MIRROR_MINI_STEP_OPERATOR) {
    new_node_input = {NewValueNode(pyop_instance), node, local_step_param, grad_accu};
    MS_LOG(INFO) << "Insert the local step node and grad accumulation node as the mirror op's inputs";
  } else {
    new_node_input = {NewValueNode(pyop_instance), node};
  }
  if (!params.empty()) {
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      MS_EXCEPTION_IF_NULL(val);
      int64_t position = param.second;
      (void)new_node_input.insert(new_node_input.begin() + position, val);
    }
  }
  // if the op has a 'group' attr, set the rank list name for it
  SetCommunicationOpGroupLabel(new_node_input);
  return new_node_input;
}
void InsertMirrorNode(const FuncGraphPtr &root, const Operator &op, const CNodePtr &node, size_t index,
                      const AnfNodePtr &pre_node, const FuncGraphPtr &func_graph, const std::string &instance_name,
                      const std::string &param_name) {
  // insert new node before the node
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  std::vector<AnfNodePtr> node_input = CreateMirrorInput(root, op, pre_node, instance_name, param_name);
  CNodePtr new_node = func_graph->NewCNode(node_input);
  MS_EXCEPTION_IF_NULL(new_node);
  if (instance_name.find(SPLIT_SENS) == std::string::npos) {
    new_node->set_in_forward_flag(true);  // mark forward flag
  }
  auto new_node_value = node_input[0]->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(new_node_value);
  PrimitivePtr new_node_prim = new_node_value->value()->cast<PrimitivePtr>();
  new_node_prim->set_instance_name(instance_name);
  new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
  new_node->set_scope(scope);
  node_input[0]->set_scope(scope);
  manager->SetEdge(node, SizeToLong(index), new_node);
  MS_LOG(INFO) << "Insert " << instance_name << " success";
}

// Replace pre_node with pre_node->op
static CNodePtr ReplaceNode(const Operator &op, const AnfNodePtr &pre_node, const FuncGraphPtr &func_graph,
                            const std::string &instance_name) {
  // replace pre_node with op(pre_node)
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = pre_node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  std::vector<AnfNodePtr> node_input = CreateInput(op, pre_node, instance_name);
  CNodePtr new_node = func_graph->NewCNode(node_input);
  MS_EXCEPTION_IF_NULL(new_node);
  if (instance_name.find(SPLIT_SENS) == std::string::npos) {
    new_node->set_in_forward_flag(true);  // mark forward flag
  }
  auto new_node_prim = GetValueNode<PrimitivePtr>(node_input[0]);
  new_node_prim->set_instance_name(instance_name);
  new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
  new_node->set_scope(scope);
  node_input[0]->set_scope(scope);
  manager->Replace(pre_node, new_node);
  MS_LOG(INFO) << "Insert " << instance_name << " success";
  return new_node;
}

std::string CreateInstanceName(const CNodePtr &node, size_t index) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsValueNode<Primitive>(node->input(0))) {
    MS_LOG(EXCEPTION) << "CreateInstanceName: " << node->ToString() << " doesn't have a primitive";
  }
  std::string name_base = node->fullname_with_scope();
  std::string name = name_base + "_" + std::to_string(index);
  std::string instance_name = HashInstanceName(name);
  return instance_name;
}
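
// Insert the forward communication ops of an operator after `node`.
// Multi-output nodes are only supported through a single tuple_getitem user,
// which then becomes the insertion point.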
void ForwardCommunication(OperatorVector forward_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  // step 1: get the graph manager
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto uses_set = manager->node_users()[node];
  CNodePtr node_to_insert = node;
  for (auto &uses_pair : uses_set) {
    auto uses_cnode = uses_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(uses_cnode);
    if (!IsValueNode<Primitive>(uses_cnode->input(0))) {
      break;
    }
    PrimitivePtr value_node_prim = GetValueNode<PrimitivePtr>(uses_cnode->input(0));
    MS_EXCEPTION_IF_NULL(value_node_prim);
    if (value_node_prim->name() == TUPLE_GETITEM) {
      if (uses_set.size() > 1) {
        MS_LOG(EXCEPTION) << "Now only support one output, but got " << uses_set.size();
      }
      node_to_insert = uses_cnode;
    }
  }
  MS_EXCEPTION_IF_NULL(node_to_insert);
  std::reverse(forward_op.begin(), forward_op.end());
  // step 2: traverse op_list and insert nodes
  for (size_t index = 0; index < forward_op.size(); ++index) {
    std::string instance_name_base = FORWARD_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(node, index);
    std::vector<AnfNodePtr> forward_input = CreateInput(forward_op[index], node_to_insert, instance_name);
    CNodePtr forward_node = func_graph->NewCNode(forward_input);  // using NewCNode to create anfnode
    MS_EXCEPTION_IF_NULL(forward_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    forward_node->set_scope(scope);
    forward_node->set_in_forward_flag(true);
    forward_input[0]->set_scope(scope);
    (void)manager->Replace(node_to_insert, forward_node);  // using Replace function to insert node
  }
}
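
// Rebuild a num-element tuple from `prev`: create tuple_getitem(prev, i) for
// each i in [0, num), pack them with make_tuple, and replace prev's users
// with the new tuple.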
CNodePtr InsertMakeTuple(const AnfNodePtr &prev, uint64_t num, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(prev);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> make_tuple_inputs;
  make_tuple_inputs.push_back(NewValueNode(prim::kPrimMakeTuple));
  for (uint64_t i = 0; i < num; i++) {
    std::vector<AnfNodePtr> tuple_get_item_inputs{NewValueNode(prim::kPrimTupleGetItem), prev,
                                                  CreatInt64Imm(UlongToLong(i))};
    auto tuple_get_item = func_graph->NewCNode(tuple_get_item_inputs);
    MS_EXCEPTION_IF_NULL(tuple_get_item);
    make_tuple_inputs.push_back(tuple_get_item);
  }
  auto make_tuple = func_graph->NewCNode(make_tuple_inputs);
  MS_EXCEPTION_IF_NULL(make_tuple);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  (void)manager->Replace(prev, make_tuple);
  return make_tuple;
}
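
// Insert the inferred redistribution operator chain on input `pos` of
// `node`; entries flagged in the OutPutInfoVector additionally get a
// make_tuple rebuilt on top of the inserted op.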
void InsertRedistribution(const RedistributionOpListPtr &redistribution_oplist_ptr, const CNodePtr &node,
                          const FuncGraphPtr &func_graph, int64_t pos, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(pre_node);
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if ((redistribution_oplist_ptr->first).size() != (redistribution_oplist_ptr->second).size()) {
    MS_LOG(EXCEPTION) << "The sizes of OperatorVector and OutPutInfoVector must be the same!";
  }
  for (size_t index = 0; index < (redistribution_oplist_ptr->first).size(); ++index) {
    if (pos >= SizeToLong(node->inputs().size())) {
      MS_LOG(EXCEPTION) << "InsertRedistribution: pos can't be larger than the size of node's inputs";
    }
    // Create new node
    AnfNodePtr target_node = node->input(LongToSize(pos));
    MS_EXCEPTION_IF_NULL(target_node);
    // Create instance_name
    auto op = (redistribution_oplist_ptr->first)[index];
    std::string op_name = (redistribution_oplist_ptr->first)[index].first;
    std::string instance_name_base = REDISTRIBUTION_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(pre_node, index) + op_name;
    InsertNode(op, node, LongToSize(pos), target_node, func_graph, instance_name);
    if ((redistribution_oplist_ptr->second)[index].first) {
      target_node = node->input(LongToSize(pos));
      MS_EXCEPTION_IF_NULL(target_node);
      (void)InsertMakeTuple(target_node, (redistribution_oplist_ptr->second)[index].second, func_graph);
    }
  }
}

void InsertGetTensorSliceOp(const Operator &op, const CNodePtr &node, const FuncGraphPtr &func_graph, int64_t pos,
                            const std::string &instance_name) {
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: the graph is null, the instance name is " << instance_name;
  }
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if (pos >= SizeToLong(node->inputs().size())) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: pos can't be larger than the size of node's inputs, "
                      << "the instance name is " << instance_name;
  }
  // Create new node
  AnfNodePtr pre_node = node->input(LongToSize(pos));
  MS_EXCEPTION_IF_NULL(pre_node);
  InsertNode(op, node, LongToSize(pos), pre_node, func_graph, instance_name);
}

TensorLayout GetTensorInLayout(const CNodePtr &middle_node, const PrimitivePtr &middle_prim,
                               const OperatorInfoPtr &distribute_operator) {
  TensorInfo tensorinfo_in;
  if (middle_prim->name() == TUPLE_GETITEM) {
    auto value_node = middle_node->input(2)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(value_node);
    size_t index_s = LongToSize(GetValue<int64_t>(value_node->value()));
    if (index_s >= distribute_operator->outputs_tensor_info().size()) {
      MS_LOG(EXCEPTION) << "The index is out of range, index: " << index_s
                        << ", vector size: " << distribute_operator->outputs_tensor_info().size();
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[index_s];
  } else {
    if (distribute_operator->outputs_tensor_info().empty()) {
      MS_LOG(EXCEPTION) << "The outputs tensor info is empty";
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[0];
  }
  return tensorinfo_in.tensor_layout();
}

std::string GetPrimName(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsValueNode<Primitive>(node->input(0))) {
    MS_LOG(EXCEPTION) << "The node is not a primitive";
  }
  auto value_node = node->input(0)->cast<ValueNodePtr>();
  auto prim = GetValueNode<PrimitivePtr>(value_node);
  MS_EXCEPTION_IF_NULL(prim);
  return prim->name();
}

OperatorInfoPtr GetDistributeOperator(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsParallelCareNode(node)) {
    return nullptr;
  }
  OperatorInfoPtr distribute_operator = node->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Distribute operator is nullptr, the prim is " << GetPrimName(node);
  }
  return distribute_operator;
}
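
// Infer the redistribution between middle_node's output layout and the
// layout next_node expects on input `index`, then insert the resulting
// operator list in front of next_node.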
void Redistribution(const std::pair<AnfNodePtr, int64_t> &node_pair, const OperatorInfoPtr &distribute_operator,
                    const CNodePtr &middle_node, int64_t index, TensorRedistribution tensor_redistribution,
                    const CNodePtr &pre_node) {
  FuncGraphPtr func_graph = middle_node->func_graph();
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "Redistribution: get graph failed";
  }
  CNodePtr next_node = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(next_node);
  auto middle_value = middle_node->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(middle_value);
  PrimitivePtr middle_prim = middle_value->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(middle_prim);
  OperatorInfoPtr next_distribute_operator = GetDistributeOperator(next_node);
  if (next_distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << next_node->ToString() << " GetDistributeOperator failed";
  }
  RankList dev_list = distribute_operator->stage_device_list();
  std::string next_prim_name = GetValueNode<PrimitivePtr>(next_node->input(0))->name();
  MS_LOG(DEBUG) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim " << next_prim_name;
  MS_LOG(DEBUG) << "Redistribution: middle_node " << middle_node->ToString() << " next_node " << next_node->ToString();
  // extract tensor layout in and out
  if (distribute_operator->outputs_tensor_info().empty()) {
    MS_LOG(WARNING) << "pre_node's tensorinfo_in is empty, operator name is " << distribute_operator->name();
    return;
  }
  if (LongToSize(index - 1) >= next_distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(WARNING) << "The index is out of range, the index is " << index - 1 << ", the vector size is "
                    << next_distribute_operator->inputs_tensor_info().size() << ", next operator name is "
                    << next_distribute_operator->name();
    return;
  }
  TensorInfo tensorinfo_out = next_distribute_operator->inputs_tensor_info()[LongToSize(index - 1)];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  TensorLayout tensorlayout_in = GetTensorInLayout(middle_node, middle_prim, distribute_operator);
  if (tensor_redistribution.Init(tensorlayout_in, tensorlayout_out, dev_list) == FAILED) {
    MS_LOG(ERROR) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim: " << next_prim_name;
    MS_LOG(ERROR) << "Redistribution: middle_node " << middle_node->ToString() << " next_node "
                  << next_node->ToString();
    DumpGraph(func_graph, "redistribution_error");
    MS_LOG(EXCEPTION) << "Failure: tensor_redistribution init failed";
  }
  RedistributionOpListPtr redistribution_oplist_ptr = tensor_redistribution.InferTensorRedistributionOperatorList();
  if (redistribution_oplist_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: InferTensorRedistributionOperatorList failed";
  }
  MS_LOG(DEBUG) << "Redistribution size " << redistribution_oplist_ptr->first.size();
  if (!redistribution_oplist_ptr->first.empty()) {
    // insert node before next node
    InsertRedistribution(redistribution_oplist_ptr, next_node, func_graph, node_pair.second, pre_node);
  }
}

bool StrategyFound(std::unordered_map<std::string, ValuePtr> attrs) {
  auto iter = attrs.find(STRATEGY);
  return !((iter == attrs.end()) || (iter->second->type_name() == NONE));
}

bool HasStrategy(const FuncGraphPtr &root) {
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    auto attrs = prim->attrs();
    if (StrategyFound(attrs)) {
      return true;
    }
  }
  return false;
}

bool IsCommunicationOp(const PrimitivePtr &prim) {
  MS_EXCEPTION_IF_NULL(prim);
  return (COMMUNICATION_OPS.find(prim->name()) != COMMUNICATION_OPS.end());
}

bool FindCommunicationOp(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_value_node = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_value_node);
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_value_node);
    MS_EXCEPTION_IF_NULL(prim);
    if (IsCommunicationOp(prim) && cnode->in_forward_flag()) {
      MS_EXCEPTION_IF_NULL(prim_value_node->scope());
      MS_LOG(INFO) << "The graph contains the communication op: " << prim->name() << ", scope name is "
                   << prim_value_node->scope()->name();
      return true;
    }
  }
  return false;
}

bool IsParallelCareNode(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  ValueNodePtr prim_node = cnode->input(0)->cast<ValueNodePtr>();
  if (prim_node == nullptr) {
    return false;
  }
  PrimitivePtr prim = prim_node->value()->cast<PrimitivePtr>();
  if (prim == nullptr) {
    return false;
  }
  if (IsInParallelBlackList(prim)) {
    MS_LOG(DEBUG) << "Parallel doesn't care about node: " << prim->name();
    return false;
  }
  // get_next is not in the forward graph, so we need to mark it as a forward node
  if (prim->name() == GET_NEXT) {
    return true;
  }
  if ((prim->name() == CAST) && !cnode->has_user_data<OperatorInfo>()) {
    return false;
  }
  return cnode->in_forward_flag();
}
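
// Walk the users of `node`: insert redistribution in front of every user
// that is a parallel-care CNode carrying an OperatorInfo, and recurse
// through the remaining users until such a consumer is found. Depend users
// are only followed through their first input.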
void StepRedistribution(const CNodePtr &node, const OperatorInfoPtr &distribute_operator, const CNodePtr &insert_node,
                        const TensorRedistribution &tensor_redistribution, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node->func_graph());
  FuncGraphManagerPtr manager = node->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  CNodePtr insert_node_new;
  if (AnfNodeIsPrimitive(node, MAKE_TUPLE) || AnfNodeIsPrimitive(node, MAKE_LIST)) {
    MS_LOG(INFO) << "No need to insert redistribution op between make_tuple node and the next node";
    return;
  }
  if (IsValueNode<Primitive>(node->input(0))) {
    auto current_value = node->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(current_value);
    PrimitivePtr current_prim = current_value->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(current_prim);
    insert_node_new = ((current_prim->name() == TUPLE_GETITEM) ? node : insert_node);
  } else {
    insert_node_new = insert_node;
  }
  MS_EXCEPTION_IF_NULL(insert_node_new);
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode);
    if (!IsValueNode<Primitive>(use_cnode->input(0))) {
      StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
    } else {
      ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
      MS_EXCEPTION_IF_NULL(prim_anf_node);
      PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
      MS_EXCEPTION_IF_NULL(node_prim);
      if (node_prim->name() == DEPEND && node_pair.second != 1) {
        continue;
      }
      if (IsParallelCareNode(use_cnode) && use_cnode->has_user_data<OperatorInfo>()) {
        Redistribution(node_pair, distribute_operator, insert_node_new, node_pair.second, tensor_redistribution,
                       pre_node);
      } else {
        StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
      }
    }
  }
}
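
// Replace the tensor feeding input `index` of next_node with its local
// slice: look up the expected input layout in next_node's OperatorInfo and
// insert a _GetTensorSlice op (plus any sub-ops the operator defines).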
void SplitTensor(const AnfNodePtr &node, const CNodePtr &next_node, int64_t index) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(next_node);
  OperatorInfoPtr op_info = next_node->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(op_info);
  // If the shape of the tensor is [] or [1], there is no need to split it.
  Shapes shapes = GetNodeShape(node);
  if (shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Split tensor for " << op_info->name()
                      << ": GetNodeShape for tensor_node, output size is not 1";
  }
  Shape shape = shapes[0];
  std::string shape_str = ShapeToString(shape);
  if (shape.empty() || ((shape.size() == 1) && (shape[0] == 1))) {
    MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape is " << shape_str
                 << ", no need to split it.";
    return;
  }
  MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape of the tensor is " << shape_str;
  // extract tensor layout
  if (LongToSize(index - 1) >= op_info->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << index - 1 << ", vector size is "
                      << op_info->inputs_tensor_info().size();
  }
  TensorInfo tensor_info = op_info->inputs_tensor_info()[LongToSize(index - 1)];
  TensorLayout tensor_layout = tensor_info.tensor_layout();
  // Use the _GetTensorSlice operator to split the tensor
  FuncGraphPtr func_graph = next_node->func_graph();  // only a cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(tensor_layout);
  InsertGetTensorSliceOp(op, next_node, func_graph, index, SPLIT_TENSOR);
  if (!op_info->sub_ops().empty()) {
    auto sub_ops = op_info->sub_ops();
    for (size_t i = 0; i < sub_ops.size(); i++) {
      if (!sub_ops.at(i).empty()) {
        InsertGetTensorSliceOp(sub_ops.at(i).at(0), next_node, func_graph, index, SUB);
      }
    }
  }
}

void SplitTensorList(const AnfNodePtr &node, const CNodePtr &next_node, int index) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(next_node);
  if (next_node->inputs().size() != 2 || index != 1) {
    MS_LOG(INFO) << next_node->fullname_with_scope() << ": must have exactly one input, got "
                 << next_node->inputs().size() - 1 << "; index should be 1, got " << index;
    return;
  }
  OperatorInfoPtr op_info = next_node->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(op_info);
  std::vector<ValuePtr> inputs_values;
  if (IsValueNode<ValueList>(node)) {
    inputs_values = node->cast<ValueNodePtr>()->value()->cast<ValueListPtr>()->value();
  } else {
    inputs_values = node->cast<ValueNodePtr>()->value()->cast<ValueTuplePtr>()->value();
  }
  if (inputs_values.size() != op_info->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The inputs size " << inputs_values.size() << " is not equal to the inputs shape size "
                      << op_info->inputs_tensor_info().size();
  }
  std::vector<AnfNodePtr> make_tuple_inputs = {NewValueNode(prim::kPrimMakeTuple)};
  FuncGraphPtr func_graph = next_node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = next_node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  for (size_t i = 0; i < inputs_values.size(); ++i) {
    auto value_ptr = inputs_values[i];
    auto tensor = value_ptr->cast<tensor::TensorPtr>();
    MS_EXCEPTION_IF_NULL(tensor);
    TensorInfo tensor_info = op_info->inputs_tensor_info()[i];
    TensorLayout tensor_layout = tensor_info.tensor_layout();
    auto value_node = NewValueNode(value_ptr)->cast<AnfNodePtr>();
    Operator op = CreateGetTensorSliceOp(tensor_layout);
    std::vector<AnfNodePtr> node_input = CreateInput(op, value_node, SPLIT_TENSOR);
    CNodePtr new_node = func_graph->NewCNode(node_input);
    new_node->set_in_forward_flag(true);
    auto new_node_value = node_input[0]->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(new_node_value);
    PrimitivePtr new_node_prim = new_node_value->value()->cast<PrimitivePtr>();
    new_node_prim->set_instance_name(SPLIT_TENSOR);
    new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
    new_node->set_scope(scope);
    node_input[0]->set_scope(scope);
    make_tuple_inputs.push_back(new_node);
  }
  CNodePtr make_tuple = func_graph->NewCNode(make_tuple_inputs);
  manager->Replace(node, make_tuple);
}

void StepSplitTensor(const AnfNodePtr &node, const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    if (use_cnode == nullptr || !IsValueNode<Primitive>(use_cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr use_cnode_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode_prim);
    if (use_cnode_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_cnode)) {
      if (IsValueNode<ValueList>(node) || IsValueNode<ValueTuple>(node)) {
        SplitTensorList(node, use_cnode, node_pair.second);
      } else {
        SplitTensor(node, use_cnode, node_pair.second);
      }
    }
  }
}
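
// Build the input list for a replacement op. The base form is
// {value node of op, node->input(1)} (plus node->input(2) for
// EmbeddingLookup); constant params are spliced in at their recorded
// positions, and a first param at position 1 appears to displace the data
// input. GetNext, which has no data inputs, keeps only the op value node.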
std::vector<AnfNodePtr> ReplaceOpInput(const Operator &replace_op, const std::string &instance_name,
                                       const CNodePtr &node) {
  OperatorArgs arg_replace_op = replace_op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_replace_op.first, replace_op.first, instance_name);
  if (pyop_instance == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << replace_op.first << " CreatOpInstance failed";
  }
  OperatorParams params = arg_replace_op.second;
  if (node->inputs().size() < 2) {
    // the GetNext operator does not have inputs
    if (node->inputs().size() == 1) {
      return {NewValueNode(pyop_instance)};
    }
    MS_LOG(EXCEPTION) << "Failure: " << node->ToString() << " size is smaller than 2";
  }
  std::vector<AnfNodePtr> replace_input = {NewValueNode(pyop_instance), node->input(1)};
  if (replace_op.first == EMBEDDING_LOOKUP) {
    replace_input = {NewValueNode(pyop_instance), node->input(1), node->input(2)};
  }
  if (!params.empty()) {
    Param param_first = *(params.begin());
    int64_t first_position = param_first.second;
    if (first_position == 1) {
      replace_input.pop_back();
    }
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      if (val == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: val is nullptr";
      }
      int64_t position = param.second;
      (void)replace_input.insert(replace_input.begin() + position, val);
    }
  }
  return replace_input;
}

void ReplaceOneOp(const Operator &replace_op, const CNodePtr &node) {
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  std::string instance_name = CreateInstanceName(node, 0);
  std::vector<AnfNodePtr> replace_input;
  replace_input = ReplaceOpInput(replace_op, instance_name, node);
  CNodePtr replace_node = func_graph->NewCNode(replace_input);
  MS_EXCEPTION_IF_NULL(replace_node);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  replace_node->set_scope(scope);
  replace_node->set_in_forward_flag(true);
  replace_input[0]->set_scope(scope);
  (void)manager->Replace(node, replace_node);
}

void StepReplaceOp(OperatorVector replace_op, const CNodePtr &node) {
  // step 1: get the graph manager and distribute_operator
  OperatorInfoPtr distribute_operator = node->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since distribute_operator is nullptr";
  }
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  // step 2: traverse op_list and insert nodes
  std::reverse(replace_op.begin(), replace_op.end());
  auto replace_op_info = distribute_operator->replace_op_info();
  std::reverse(replace_op_info.begin(), replace_op_info.end());
  if (!replace_op_info.empty() && replace_op_info.size() != replace_op.size()) {
    MS_LOG(EXCEPTION) << "replace_op_info is not empty, but its size is not equal to replace_op's!";
  }
  bool replace_op_info_flag = !replace_op_info.empty();
  for (size_t index = 0; index < replace_op.size(); ++index) {
    std::string instance_name = CreateInstanceName(node, index);
    std::vector<AnfNodePtr> replace_input;
    if (index != replace_op.size() - 1) {
      replace_input = CreateInput(replace_op[index], node, instance_name);
    } else {
      replace_input = ReplaceOpInput(replace_op[index], instance_name, node);
    }
    CNodePtr replace_node = func_graph->NewCNode(replace_input);
    MS_EXCEPTION_IF_NULL(replace_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    replace_node->set_scope(scope);
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(replace_node->input(0));
    if (prim->name() == EMBEDDING_LOOKUP) {
      auto attrs = prim->attrs();
      attrs[TARGET] = MakeValue(CPU);
      (void)prim->SetAttrs(attrs);
    }
    if (index == replace_op.size() - 1) {
      replace_node->set_user_data<OperatorInfo>(node->user_data<OperatorInfo>());
    }
    replace_node->set_in_forward_flag(true);
    replace_input[0]->set_scope(scope);
    if (replace_op_info_flag && replace_op_info[index].first) {
      auto new_cnode = InsertMakeTuple(replace_node, replace_op_info[index].second, func_graph);
      (void)manager->Replace(node, new_cnode);  // using Replace function to insert node
    } else {
      (void)manager->Replace(node, replace_node);  // using Replace function to insert node
    }
  }
  MS_LOG(INFO) << "Insert ReplaceOp success for " << distribute_operator->name();
}

bool IsSomePrimitive(const CNodePtr &cnode, const std::string &name) {
  ValueNodePtr anf_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(anf_node);
  PrimitivePtr prim = anf_node->value()->cast<PrimitivePtr>();
  return (prim->name() == name);
}

void StepReplaceGraph(const ReplaceGraphPtr &replace_graph, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(replace_graph);
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(replace_graph->second);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  // Solve the input order.
  // For example, input_node: {segment_sum: 1, segment_sum: 2, gather: 2}.
  // The original code here bound all operations to the first input of these
  // operators. However, segment_sum needs two inputs. To solve this, we
  // maintain a map counting how many times the same operation appears, and
  // bind the inputs according to that count.
  static std::unordered_map<AnfNodePtr, int> input_map = {};
  static int appear_count = 0;
  for (auto &replace_input : replace_graph->first) {
    auto pre_node = node->input(LongToSize(replace_input.second));
    auto it = input_map.find(replace_input.first);
    if (it != input_map.end()) {
      appear_count = 1 + it->second;
    } else {
      appear_count = 1;
    }
    input_map[replace_input.first] = appear_count;
    manager->SetEdge(replace_input.first, appear_count, pre_node);
  }
  // "(void)manager->Replace(replace_graph->first, pre_node);" cannot be called here
  auto replace_output = replace_graph->second;
  MS_EXCEPTION_IF_NULL(replace_output);
  (void)manager->Replace(node, replace_output);
}

int64_t GetTupleGetItemIndex(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  if (cnode->inputs().size() != 3) {
    MS_LOG(EXCEPTION) << cnode->ToString() << " size(" << cnode->inputs().size() << ") is not 3";
  }
  if (!cnode->input(2)->isa<ValueNode>()) {
    MS_LOG(EXCEPTION) << "The index of tuple getitem is not a value node";
  }
  ValuePtr tuple_index_value = GetValueNode(cnode->input(2));
  MS_EXCEPTION_IF_NULL(tuple_index_value);
  if (!tuple_index_value->isa<Int64Imm>()) {
    MS_LOG(EXCEPTION) << "The index of tuple getitem is not int64";
  }
  return tuple_index_value->cast<Int64ImmPtr>()->value();
}

void InsertVirtualDivOp(const VirtualDivOp &virtual_div_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  for (size_t index = 1; index < node_size; ++index) {
    AnfNodePtr input = node->input(index);
    MS_EXCEPTION_IF_NULL(input);
    if (!input->isa<CNode>() && !input->isa<Parameter>()) {  // if it is not a tensor, continue
      MS_LOG(INFO) << "insert div op: the index " << index << " is not a tensor, skip";
      continue;
    }
    for (size_t pos = 0; pos < virtual_div_op.size(); ++pos) {
      std::string instance_name = CreateInstanceName(node, pos);
      InsertNode(virtual_div_op[pos], node, index, node->input(index), func_graph, instance_name);
    }
    MS_LOG(INFO) << "insert div op for input index " << index << " of node";
  }
}
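
// Search backwards from `node` for the parameter (or RefKey) that feeds it;
// the bool in the returned pair marks the RefKey case. Parameters with a
// non-empty opt_shard_group are reported as nullptr, presumably so that no
// mirror is inserted for them.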
  879. // Only used for InsertMirrorOps
  880. std::pair<AnfNodePtr, bool> FindParameter(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  881. if (!node->isa<Parameter>() && !node->isa<CNode>() && !node->isa<ValueNode>()) {
  882. return std::make_pair(nullptr, false);
  883. } else if (node->isa<Parameter>()) {
  884. auto param_ptr = node->user_data<parallel::TensorLayout>();
  885. if (param_ptr != nullptr && !param_ptr->opt_shard_group().empty()) {
  886. return std::make_pair(nullptr, false);
  887. } else {
  888. return std::make_pair(node, false);
  889. }
  890. } else if (node->isa<ValueNode>()) {
  891. if (IsValueNode<RefKey>(node)) {
  892. std::vector<AnfNodePtr> param_v = FindParameterByRefKeyNode(node, func_graph);
  893. if (param_v.size() != 1) {
  894. MS_LOG(EXCEPTION) << "FindParameterByRefKeyNode failed, return vector size must be 1, real is "
  895. << param_v.size();
  896. }
  897. auto param_ptr = param_v[0]->user_data<parallel::TensorLayout>();
  898. if (param_ptr != nullptr && !param_ptr->opt_shard_group().empty()) {
  899. return std::make_pair(nullptr, true);
  900. } else {
  901. return std::make_pair(node, true);
  902. }
  903. }
  904. return std::make_pair(nullptr, false);
  905. } else {
  906. CNodePtr cnode = node->cast<CNodePtr>();
  907. MS_EXCEPTION_IF_NULL(cnode);
  908. if (!IsValueNode<Primitive>(cnode->input(0))) {
  909. for (size_t index = 0; index < cnode->inputs().size(); ++index) {
910. auto result = FindParameter(cnode->input(index), func_graph);
911. if (!result.first) { continue; }
912. return result;
  914. }
  915. } else {
  916. if (IsSomePrimitive(cnode, RECEIVE) && !cnode->has_user_data<OperatorInfo>()) {
  917. return std::make_pair(node, false);
  918. }
  919. if (IsParallelCareNode(cnode)) {
  920. return std::make_pair(nullptr, false);
  921. } else {
  922. ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  923. MS_EXCEPTION_IF_NULL(prim_anf_node);
  924. for (size_t index = 0; index < cnode->inputs().size(); ++index) {
  925. PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  926. MS_EXCEPTION_IF_NULL(prim);
  927. if (prim->name() == DEPEND && index != 1) {
  928. continue;
  929. }
930. auto result = FindParameter(cnode->input(index), func_graph);
931. if (!result.first) { continue; }
932. return result;
  934. }
  935. }
  936. }
  937. }
  938. return std::make_pair(nullptr, false);
  939. }
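// Reading FindParameter's result (sketch): the pair is (parameter, came_from_ref_key).
//   auto res = FindParameter(node->input(i), func_graph);
//   if (!res.first) { /* no mirror needed: no parameter behind this input, or it is opt-sharded */ }
//   else if (res.second) { /* the parameter was reached through a RefKey value node */ }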
  940. std::pair<bool, CNodePtr> FindCNode(const AnfNodePtr &anode, const std::string &name, const FuncGraphPtr &func_graph) {
  941. MS_EXCEPTION_IF_NULL(anode);
  942. MS_EXCEPTION_IF_NULL(anode->func_graph());
  943. FuncGraphManagerPtr manager = anode->func_graph()->manager();
  944. MS_EXCEPTION_IF_NULL(manager);
  945. AnfNodeIndexSet node_set = manager->node_users()[anode];
  946. bool result = false;
  947. CNodePtr cnode_return = nullptr;
  948. for (auto &node_pair : node_set) {
  949. CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
  950. if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
  951. continue;
  952. }
  953. ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
  954. MS_EXCEPTION_IF_NULL(prim_anf_node);
  955. PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
  956. MS_EXCEPTION_IF_NULL(node_prim);
  957. if (node_prim->name() == name && node_pair.second == 1) {
  958. if (use_apply->func_graph() == func_graph) {
  959. result = true;
  960. cnode_return = use_apply;
  961. MS_LOG(INFO) << "Find Primitive " << name << " in the same func_graph";
  962. continue;
  963. }
  964. MS_LOG(INFO) << "Find Primitive " << name << " in different func_graph";
  965. }
  966. }
  967. return std::make_pair(result, cnode_return);
  968. }
  969. bool IsCastBeforMirror(const CNodePtr &node, size_t index) {
970. // return true only if gradient_fp32_sync is true, the pre node is a Cast, and its output type is not float32
  971. if (!ParallelContext::GetInstance()->gradient_fp32_sync()) {
  972. return false;
  973. }
  974. auto pre_node = node->input(index);
  975. MS_EXCEPTION_IF_NULL(pre_node);
  976. auto cnode = pre_node->cast<CNodePtr>();
  977. if (cnode == nullptr || !IsValueNode<Primitive>(cnode->input(0))) {
  978. return false;
  979. }
  980. auto pre_value_node = cnode->input(0)->cast<ValueNodePtr>();
  981. MS_EXCEPTION_IF_NULL(pre_value_node);
  982. auto pre_prim = pre_value_node->value()->cast<PrimitivePtr>();
  983. MS_EXCEPTION_IF_NULL(pre_prim);
  984. if (pre_prim->name() != CAST) {
  985. return false;
  986. }
  987. auto node_type = pre_node->Type();
  988. MS_EXCEPTION_IF_NULL(node_type);
  989. if (!node_type->isa<mindspore::TensorType>()) {
  990. MS_LOG(EXCEPTION) << "Unknown type.";
  991. }
  992. auto input_element_type = node_type->cast<mindspore::TensorTypePtr>()->element();
  993. MS_EXCEPTION_IF_NULL(input_element_type);
  994. auto type_id = input_element_type->type_id();
  995. return (type_id != kNumberTypeFloat32);
  996. }
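// Case this detects (assuming gradient_fp32_sync is enabled):
//   param(float32) -> Cast(float16) -> op
// The Cast output type is float16 (!= kNumberTypeFloat32), so the function returns
// true and InsertMirrorOps places the mirror before the Cast, synchronizing the
// gradient in float32.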
  997. static void AddCommOpFusionType(const CNodePtr &comm_node, const AnfNodePtr &param_node) {
  998. MS_EXCEPTION_IF_NULL(comm_node);
  999. MS_EXCEPTION_IF_NULL(param_node);
  1000. if (IsPrimitiveCNode(param_node, prim::kPrimReceive)) {
  1001. MS_LOG(WARNING) << "The mirror of Receive does not support fusion type now.";
  1002. return;
  1003. }
  1004. auto param = param_node->cast<ParameterPtr>();
  1005. MS_EXCEPTION_IF_NULL(param);
  1006. auto prim = GetValueNode<PrimitivePtr>(comm_node->input(0));
  1007. MS_EXCEPTION_IF_NULL(prim);
  1008. auto attrs = prim->attrs();
  1009. auto param_info = param->param_info();
  1010. if (!param_info) {
1011. MS_LOG(WARNING) << param->ToString() << " does not have parameter info.";
  1012. return;
  1013. }
  1014. int32_t fusion_type = param_info->comm_fusion();
  1015. attrs[FUSION] = MakeValue<int64_t>(fusion_type);
  1016. prim->SetAttrs(attrs);
1017. MS_LOG(INFO) << "Set comm fusion: " << param->param_info()->name() << "'s fusion type is " << fusion_type;
  1018. }
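// Effect sketch (assumed fusion id): for a parameter whose param_info()->comm_fusion()
// is 2, the communication primitive gets attrs[FUSION] = 2, so later passes can fuse
// communication operators that share the same fusion id.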
  1019. void InsertMirrorOps(const FuncGraphPtr &root, const MirrorOps &mirror_ops, const CNodePtr &node) {
  1020. MS_EXCEPTION_IF_NULL(node);
  1021. size_t node_size = node->inputs().size();
  1022. FuncGraphPtr func_graph = node->func_graph();
  1023. MS_EXCEPTION_IF_NULL(func_graph);
  1024. FuncGraphManagerPtr manager = func_graph->manager();
  1025. MS_EXCEPTION_IF_NULL(manager);
  1026. if ((node->inputs().size() == 2) && (IsValueNode<ValueSequeue>(node->input(1)))) {
  1027. MS_LOG(INFO) << "Input is ValueList, skip it.";
  1028. return;
  1029. }
  1030. if ((node->inputs().size() == 2) &&
  1031. (AnfNodeIsPrimitive(node->input(1), MAKE_TUPLE) || AnfNodeIsPrimitive(node->input(1), MAKE_LIST))) {
1032. MS_LOG(INFO) << "The mirror for " << GetPrimName(node) << " has been handled by the make_tuple node";
  1033. return;
  1034. }
1035. if (mirror_ops.size() != node_size - 1) {
1036. MS_LOG(EXCEPTION) << "Mirror ops' size is wrong! mirror_ops size is " << mirror_ops.size()
1037. << ", node_size - 1 is " << node_size - 1;
  1038. }
  1039. for (size_t index = 1; index < node_size; ++index) {
  1040. OperatorVector backward_op = mirror_ops[index - 1];
  1041. if (backward_op.empty()) {
  1042. continue;
  1043. }
  1044. std::pair<AnfNodePtr, bool> param_node_pair = FindParameter(node->input(index), func_graph);
  1045. if (!param_node_pair.first) {
  1046. continue;
  1047. }
  1048. auto param_ptr = param_node_pair.first->cast<ParameterPtr>();
  1049. std::string param_name;
  1050. if (param_ptr != nullptr) {
  1051. param_name = param_ptr->name();
  1052. }
  1053. // not a RefKey
  1054. if (!param_node_pair.second) {
  1055. auto next_cnode = FindCNode(param_node_pair.first, MIRROR_OPERATOR, func_graph);
1056. // if there is already a MirrorOp in the same graph, use the MirrorOp CNode as an input instead
  1057. if (next_cnode.first) {
  1058. MS_EXCEPTION_IF_NULL(next_cnode.second);
  1059. // param->cast->op, insert mirror before cast
  1060. if (node->input(index)->isa<CNode>()) {
  1061. auto pre_cnode = node->input(index)->cast<CNodePtr>();
  1062. auto pre_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  1063. if (pre_prim->name() == CAST) {
  1064. manager->SetEdge(pre_cnode, 1, next_cnode.second);
  1065. continue;
  1066. }
  1067. }
  1068. manager->SetEdge(node, SizeToLong(index), next_cnode.second);
  1069. continue;
  1070. }
  1071. }
  1072. // if the parameter found is a RefKey, or no MirrorOp is found in the same graph, insert a new MirrorOp
  1073. // only one MirrorOp in backward_op
  1074. if (backward_op.size() != 1) {
  1075. MS_LOG(EXCEPTION) << "backward_op size must be 1, real is " << backward_op.size();
  1076. }
  1077. std::string instance_name = MIRROR_OP;
  1078. if (IsCastBeforMirror(node, index)) {
  1079. for (auto &op : backward_op) {
  1080. // insert new node before the node
  1081. CNodePtr cnode = node->input(index)->cast<CNodePtr>();
  1082. MS_EXCEPTION_IF_NULL(cnode);
  1083. AnfNodePtr pre_node = cnode->input(1);
  1084. InsertMirrorNode(root, op, cnode, size_t(1), pre_node, func_graph, instance_name, param_name);
  1085. auto comm_op = cnode->input(size_t(1))->cast<CNodePtr>();
  1086. // add fusion flag
  1087. // pipeline mirror would not be set, which should be supported later
  1088. AddCommOpFusionType(comm_op, param_node_pair.first);
  1089. }
  1090. } else {
  1091. for (auto &op : backward_op) {
  1092. AnfNodePtr pre_node = node->input(index);
  1093. InsertMirrorNode(root, op, node, index, pre_node, func_graph, instance_name, param_name);
  1094. auto comm_op = node->input(index)->cast<CNodePtr>();
  1095. // add fusion flag
  1096. // pipeline mirror would not be set, which should be supported later
  1097. AddCommOpFusionType(comm_op, param_node_pair.first);
  1098. }
  1099. }
  1100. }
  1101. }
  1102. void BackwardCommunication(const FuncGraphPtr &root, const OperatorInfoPtr &distribute_operator, const CNodePtr &node,
  1103. const std::vector<std::pair<CNodePtr, LossNodeInfo>> &sens_loss_pairs) {
  1104. MS_EXCEPTION_IF_NULL(distribute_operator);
  1105. MS_EXCEPTION_IF_NULL(node);
  1106. bool is_loss_cnode =
  1107. std::any_of(sens_loss_pairs.begin(), sens_loss_pairs.end(),
  1108. [node](const std::pair<CNodePtr, LossNodeInfo> &element) { return element.second.loss_node == node; });
  1109. MirrorOps mirror_ops = distribute_operator->mirror_ops();
  1110. VirtualDivOp virtual_div_op = distribute_operator->virtual_div_op();
  1111. // insert mirror op
  1112. if (!mirror_ops.empty()) {
  1113. MS_LOG(INFO) << "insert mirror op for " << distribute_operator->name();
  1114. InsertMirrorOps(root, mirror_ops, node);
  1115. }
  1116. // insert virtual div op
  1117. if (!virtual_div_op.empty() && is_loss_cnode) {
  1118. MS_LOG(INFO) << "insert virtual div op for " << distribute_operator->name();
  1119. InsertVirtualDivOp(virtual_div_op, node);
  1120. }
  1121. }
  1122. std::string GetDisOpName(const std::string &prim_name) {
  1123. std::string op_name = prim_name;
  1124. if (!prim_name.empty() && (prim_name[0] == '_')) {
  1125. op_name = prim_name.substr(1);
  1126. }
  1127. return op_name + "Info";
  1128. }
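// Mapping examples:
//   GetDisOpName("MatMul")      -> "MatMulInfo"
//   GetDisOpName("_VirtualDiv") -> "VirtualDivInfo"  // leading '_' is stripped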
  1129. OperatorInfoPtr OperatorInstanceByName(const std::string &name, const PrimitiveAttrs &attrs,
  1130. const std::vector<Shapes> &shape_list) {
  1131. if (shape_list.size() != 2) {
  1132. MS_LOG(ERROR) << "The size of shape list is not 2";
  1133. return nullptr;
  1134. }
  1135. if (name.length() == 0) {
  1136. MS_LOG(EXCEPTION) << "Length of name is zero!";
  1137. }
  1138. std::string distribute_opname = GetDisOpName(name);
  1139. if (name == GATHERV2) {
  1140. distribute_opname = name + "PInfo";
  1141. auto data_parallel_iter = attrs.find(DATA_PARALLEL);
  1142. if (data_parallel_iter != attrs.end()) {
  1143. MS_EXCEPTION_IF_NULL(data_parallel_iter->second);
  1144. if (!data_parallel_iter->second->isa<BoolImm>()) {
1145. MS_LOG(EXCEPTION) << "The data_parallel flag's type is not a bool.";
  1146. }
  1147. bool data_parallel = data_parallel_iter->second->cast<BoolImmPtr>()->value();
  1148. if (data_parallel) {
  1149. distribute_opname = name + "Info";
  1150. }
  1151. }
  1152. }
  1153. OperatorInfoPtr operator_ =
  1154. (OperatorInfoPtr)DynCreator::Instance().Create(distribute_opname, shape_list[0], shape_list[1], attrs, TOTAL_OPS);
  1155. if (operator_ == nullptr) {
  1156. MS_LOG(INFO) << "Create " << name << " failed";
  1157. return nullptr;
  1158. }
  1159. std::string origin_name = operator_->name();
  1160. operator_->set_name(origin_name + std::to_string(TOTAL_OPS));
  1161. MS_LOG(INFO) << "Successfully created operator " << origin_name;
  1162. ++TOTAL_OPS;
  1163. return operator_;
  1164. }
  1165. OperatorInfoPtr OperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
  1166. const std::vector<Shapes> &shape_list) {
  1167. MS_EXCEPTION_IF_NULL(prim);
  1168. OperatorInfoPtr operator_ = OperatorInstanceByName(prim->name(), attrs, shape_list);
  1169. if (operator_ == nullptr) {
  1170. if (IsInBatchParallelBlackList(prim)) {
  1171. MS_LOG(EXCEPTION) << "Operator " << prim->name() << " is not supported yet in auto parallel mode.";
  1172. }
  1173. MS_LOG(INFO) << "Create " << prim->name() << " failed, use batch parallel";
  1174. operator_ = OperatorInstanceByName(BATCH_PARALLEL, attrs, shape_list);
  1175. MS_EXCEPTION_IF_NULL(operator_);
  1176. }
  1177. return operator_;
  1178. }
  1179. OperatorInfoPtr NewOperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
  1180. std::vector<Shapes> shape_list) {
  1181. OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
  1182. for (size_t i = 0; i < shape_list[0].size(); ++i) {
1183. MS_LOG(INFO) << "No. " << i << " input's shape: " << ShapeToString(shape_list[0][i]);
  1184. }
  1185. return operator_;
  1186. }
  1187. StrategyPtr ExtractStrategy(std::unordered_map<std::string, ValuePtr> attrs) {
  1188. ValueTuplePtr var = attrs[STRATEGY]->cast<ValueTuplePtr>();
  1189. StrategyPtr strategyPtr;
  1190. int64_t stage_id = g_device_manager->stage_id();
  1191. MS_LOG(INFO) << "Extract information: strategy " << attrs[STRATEGY]->ToString();
  1192. if (var == nullptr) {
  1193. MS_LOG(EXCEPTION) << "Strategy value is nullptr";
  1194. }
  1195. if (var->size() > 0) {
  1196. std::vector<ValuePtr> elements = var->value();
  1197. Strategys strategy;
  1198. for (uint64_t index = 0; index < elements.size(); ++index) {
  1199. Dimensions dim;
  1200. if (elements[index]->isa<ValueSequeue>()) {
  1201. ValueTuplePtr value_tuple = elements[index]->cast<ValueTuplePtr>();
  1202. std::vector<ValuePtr> value_vector = value_tuple->value();
  1203. (void)std::transform(value_vector.begin(), value_vector.end(), std::back_inserter(dim),
  1204. [](const ValuePtr &value) { return static_cast<int64_t>(GetValue<int64_t>(value)); });
  1205. strategy.push_back(dim);
  1206. } else {
  1207. MS_LOG(EXCEPTION) << "Failure: Strategy's format is wrong! Need ValueSequence";
  1208. }
  1209. }
  1210. if (strategy.empty()) {
  1211. MS_LOG(EXCEPTION) << "ExtractStrategy: failed to extract strategy";
  1212. }
  1213. strategyPtr = NewStrategy(stage_id, strategy);
  1214. }
  1215. return strategyPtr;
  1216. }
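// Extraction example (sketch): a primitive annotated with strategy=((2, 4), (4, 1))
// yields Strategys{{2, 4}, {4, 1}}, wrapped into a StrategyPtr for the current stage:
//   StrategyPtr s = ExtractStrategy(prim->attrs());  // hypothetical call site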
  1217. Shapes GetValueListShape(const AnfNodePtr &node) {
  1218. Shapes shapes;
  1219. std::vector<ValuePtr> inputs_seq;
  1220. if (IsValueNode<ValueList>(node)) {
  1221. inputs_seq = node->cast<ValueNodePtr>()->value()->cast<ValueListPtr>()->value();
  1222. } else if (IsValueNode<ValueTuple>(node)) {
  1223. inputs_seq = node->cast<ValueNodePtr>()->value()->cast<ValueTuplePtr>()->value();
  1224. } else {
1225. MS_LOG(EXCEPTION) << "node is neither ValueList nor ValueTuple";
  1226. }
  1227. for (auto &ele : inputs_seq) {
  1228. auto tensor = ele->cast<tensor::TensorPtr>();
  1229. MS_EXCEPTION_IF_NULL(tensor);
  1230. auto one_shape = tensor->shape();
  1231. shapes.push_back(one_shape);
  1232. }
  1233. return shapes;
  1234. }
  1235. Shapes GetNodeShape(const AnfNodePtr &node) {
  1236. MS_EXCEPTION_IF_NULL(node);
  1237. Shapes shapes;
  1238. if (IsValueNode<ValueList>(node) || IsValueNode<ValueTuple>(node)) {
  1239. return GetValueListShape(node);
  1240. }
  1241. BaseShapePtr base_shape_ptr = node->Shape();
  1242. if (node->isa<CNode>()) {
  1243. auto cnode = node->cast<CNodePtr>();
  1244. if (IsValueNode<Primitive>(cnode->input(0))) {
  1245. PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
  1246. MS_EXCEPTION_IF_NULL(prim);
  1247. if (prim->name() == MAKEREF) {
  1248. AnfNodePtr ref_node = cnode->input(1);
  1249. auto func_graph = cnode->func_graph();
  1250. MS_EXCEPTION_IF_NULL(ref_node);
  1251. MS_EXCEPTION_IF_NULL(func_graph);
  1252. return GetRefKeyNodeShape(ref_node, func_graph);
  1253. }
  1254. }
  1255. if (cnode->input(0)->isa<CNode>()) {
  1256. if (cnode->inputs().size() < 2) {
  1257. MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " size is smaller than 2";
  1258. }
  1259. base_shape_ptr = cnode->input(1)->Shape();
  1260. }
  1261. }
  1262. if (base_shape_ptr == nullptr) {
  1263. MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " shape_ptr is nullptr, full name is "
  1264. << node->fullname_with_scope();
  1265. }
  1266. auto tuple_shape_ptr = dyn_cast<abstract::SequeueShape>(base_shape_ptr);
  1267. if (tuple_shape_ptr != nullptr) {
  1268. auto tuple_shape = tuple_shape_ptr->shape();
  1269. for (auto &shape : tuple_shape) {
  1270. auto each_shape = dyn_cast<abstract::Shape>(shape);
  1271. MS_EXCEPTION_IF_NULL(each_shape);
  1272. shapes.push_back(each_shape->shape());
  1273. }
  1274. } else {
  1275. auto shape_ptr = dyn_cast<abstract::Shape>(base_shape_ptr);
  1276. MS_EXCEPTION_IF_NULL(shape_ptr);
  1277. shapes.push_back(shape_ptr->shape());
  1278. }
  1279. return shapes;
  1280. }
  1281. Shapes GetRefKeyNodeShape(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  1282. MS_EXCEPTION_IF_NULL(node);
  1283. MS_EXCEPTION_IF_NULL(func_graph);
  1284. std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(node, func_graph);
  1285. if (parameters.size() != 1) {
  1286. MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
  1287. }
  1288. Shapes input_shapes;
  1289. input_shapes = GetNodeShape(parameters[0]);
  1290. if (input_shapes.size() != 1) {
  1291. MS_LOG(EXCEPTION) << "Get input shape failed";
  1292. }
  1293. MS_LOG(INFO) << "The parameter shape is " << ShapeToString(input_shapes[0]);
  1294. return input_shapes;
  1295. }
  1296. std::vector<Shapes> ExtractShape(const CNodePtr &node) {
  1297. MS_EXCEPTION_IF_NULL(node);
  1298. Shapes shape_inputs, shape_outputs;
  1299. std::vector<Shapes> shape_all;
  1300. std::vector<AnfNodePtr> all_inputs = node->inputs();
  1301. std::vector<AnfNodePtr> node_inputs{all_inputs.begin() + 1, all_inputs.end()};
  1302. size_t inputs_size = all_inputs.size();
  1303. for (size_t i = 1; i < inputs_size; ++i) {
  1304. Shapes input_shapes;
  1305. AnfNodePtr input = all_inputs[i];
  1306. if (IsValueNode<RefKey>(input)) {
  1307. auto func_graph = node->func_graph();
  1308. MS_EXCEPTION_IF_NULL(func_graph);
  1309. std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(input, func_graph);
  1310. if (parameters.size() != 1) {
  1311. MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
  1312. }
  1313. std::pair<AnfNodePtr, int64_t> node_pair = std::make_pair(node, SizeToLong(i));
  1314. g_RefMap[parameters[0]] = node_pair;
  1315. input_shapes = GetRefKeyNodeShape(input, func_graph);
  1316. } else if (IsValueNode<Tensor>(input) || input->isa<CNode>() || input->isa<Parameter>() ||
  1317. ((IsValueNode<ValueList>(input) || IsValueNode<ValueTuple>(input)) && (inputs_size == 2))) {
  1318. input_shapes = GetNodeShape(input);
  1319. } else {
  1320. continue;
  1321. }
  1322. if (input_shapes.size() != 1) {
  1323. if (inputs_size == 2) { // like concat
  1324. shape_inputs = input_shapes;
  1325. break;
  1326. } else {
  1327. MS_LOG(EXCEPTION) << "ExtractShape: Get input shape failed";
  1328. }
  1329. }
  1330. shape_inputs.push_back(input_shapes[0]);
  1331. }
  1332. shape_all.push_back(shape_inputs);
  1333. // extract out shape
  1334. shape_outputs = GetNodeShape(node);
  1335. shape_all.push_back(shape_outputs);
  1336. return shape_all;
  1337. }
  1338. std::pair<AnfNodePtr, int64_t> FindParallelCareNode(const AnfNodePtr &node, int32_t recursion_num) {
  1339. if (recursion_num >= RECURSION_LIMIT) {
  1340. return std::make_pair(nullptr, 0);
  1341. }
  1342. MS_EXCEPTION_IF_NULL(node);
  1343. FuncGraphPtr func_graph = node->func_graph();
  1344. MS_EXCEPTION_IF_NULL(func_graph);
  1345. FuncGraphManagerPtr manager = func_graph->manager();
  1346. MS_EXCEPTION_IF_NULL(manager);
  1347. AnfNodeIndexSet node_set = manager->node_users()[node];
  1348. for (auto &node_pair : node_set) {
  1349. CNodePtr cnode = node_pair.first->cast<CNodePtr>();
  1350. MS_EXCEPTION_IF_NULL(cnode);
  1351. if (!IsValueNode<Primitive>(cnode->input(0))) {
  1352. continue;
  1353. }
  1354. ValueNodePtr prim_node_anf = cnode->input(0)->cast<ValueNodePtr>();
  1355. MS_EXCEPTION_IF_NULL(prim_node_anf);
  1356. PrimitivePtr node_prim = prim_node_anf->value()->cast<PrimitivePtr>();
  1357. MS_EXCEPTION_IF_NULL(node_prim);
  1358. if ((node_prim->name() == DEPEND && node_pair.second != 1) || IsPrimitiveCNode(cnode, prim::kPrimReceive)) {
  1359. continue;
  1360. }
  1361. if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
  1362. return node_pair;
  1363. } else {
  1364. auto tmp_pair = FindParallelCareNode(node_pair.first, recursion_num + 1);
  1365. if (tmp_pair.first != nullptr) {
  1366. return tmp_pair;
  1367. }
  1368. }
  1369. }
  1370. return std::make_pair(nullptr, 0);
  1371. }
  1372. std::pair<AnfNodePtr, int64_t> FindSubGraph(const FuncGraphPtr &graph, const AnfNodePtr &parameter) {
  1373. MS_EXCEPTION_IF_NULL(graph);
  1374. MS_EXCEPTION_IF_NULL(parameter);
  1375. FuncGraphManagerPtr manager = graph->manager();
  1376. MS_EXCEPTION_IF_NULL(manager);
  1377. std::pair<AnfNodePtr, int64_t> prim_anf_node_pair = FindParallelCareNode(parameter, 0);
  1378. if (prim_anf_node_pair.first != nullptr) {
  1379. return prim_anf_node_pair;
  1380. } else {
  1381. AnfNodeIndexSet param_sub_set = manager->node_users()[parameter];
  1382. for (auto &param_pair : param_sub_set) {
  1383. CNodePtr param_cnode = param_pair.first->cast<CNodePtr>();
  1384. AnfNodePtr graph_value_node;
  1385. if (param_cnode->input(0)->isa<CNode>()) {
  1386. graph_value_node = param_cnode->input(0)->cast<CNodePtr>()->input(1);
  1387. } else {
  1388. graph_value_node = param_cnode->input(0);
  1389. }
  1390. if (!IsValueNode<FuncGraph>(graph_value_node)) {
  1391. continue;
  1392. }
  1393. FuncGraphPtr graph_sub = GetValueNode<FuncGraphPtr>(graph_value_node);
  1394. auto parameters = graph_sub->parameters();
  1395. if (LongToSize(param_pair.second - 1) >= parameters.size()) {
  1396. MS_LOG(EXCEPTION) << "The index is out of range, index is " << param_pair.second - 1 << ", vector size is "
  1397. << parameters.size();
  1398. }
  1399. std::pair<AnfNodePtr, int64_t> res = FindSubGraph(graph_sub, parameters[LongToSize(param_pair.second - 1)]);
  1400. if (res.first != nullptr) {
  1401. return res;
  1402. }
  1403. }
  1404. }
  1405. return std::make_pair(nullptr, 0);
  1406. }
  1407. static void InsertAllGatherOp(const std::string &group, const std::pair<AnfNodePtr, int> &res,
  1408. const AnfNodePtr &parameter) {
  1409. Operator op = CreateAllGatherOp(group);
  1410. MS_EXCEPTION_IF_NULL(res.first);
  1411. MS_EXCEPTION_IF_NULL(parameter);
  1412. auto cnode = res.first->cast<CNodePtr>();
  1413. auto graph = cnode->func_graph();
  1414. MS_EXCEPTION_IF_NULL(graph);
  1415. auto cnode_prim = GetValueNode<PrimitivePtr>(cnode->input(0));
  1416. MS_EXCEPTION_IF_NULL(cnode_prim);
  1417. CNodePtr allgather;
  1418. if (cnode_prim->name() == CAST) {
  1419. allgather = ReplaceNode(op, cnode, graph, PARALLEL_OPTIMIZER_ALLGATHER);
  1420. } else {
  1421. InsertNode(op, cnode, res.second, parameter, graph, PARALLEL_OPTIMIZER_ALLGATHER);
  1422. allgather = cnode->input(res.second)->cast<CNodePtr>();
  1423. }
  1424. MS_EXCEPTION_IF_NULL(allgather);
  1425. // add fusion flag
  1426. AddCommOpFusionType(allgather, parameter);
  1427. // add gradients mean
  1428. auto prim = GetValueNode<PrimitivePtr>(allgather->input(0));
  1429. auto attrs = prim->attrs();
  1430. MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  1431. bool mean_flag = ParallelContext::GetInstance()->gradients_mean();
  1432. attrs["mean_flag"] = MakeValue<bool>(mean_flag);
  1433. prim->SetAttrs(attrs);
  1434. }
  1435. static void ApplyParallelOptOnParam(const FuncGraphPtr &root, const AnfNodePtr &parameter,
  1436. const std::string &opt_shard_group) {
  1437. if (opt_shard_group.empty()) {
  1438. return;
  1439. }
  1440. FuncGraphManagerPtr manager = root->manager();
  1441. MS_EXCEPTION_IF_NULL(manager);
  1442. auto param_sub_set = manager->node_users()[parameter];
  1443. for (auto &param_pair : param_sub_set) {
  1444. auto cnode = param_pair.first->cast<CNodePtr>();
  1445. MS_EXCEPTION_IF_NULL(cnode);
  1446. if (cnode->in_forward_flag()) {
  1447. OperatorInfoPtr distribute_operator = cnode->user_data<OperatorInfo>();
  1448. if (distribute_operator == nullptr) {
  1449. MS_LOG(WARNING) << "Parallel optimizer: " << cnode->ToString() << " 's OperatorInfoPtr is nullptr";
  1450. } else if (IntToSize(param_pair.second - 1) >= distribute_operator->inputs_tensor_info().size()) {
  1451. MS_LOG(EXCEPTION) << "The index is out of range, index is " << param_pair.second - 1 << ", vector size is "
  1452. << distribute_operator->inputs_tensor_info().size();
  1453. }
  1454. // insert allgather operator between shard parameter and cnode
  1455. InsertAllGatherOp(opt_shard_group, param_pair, parameter);
  1456. MS_LOG(INFO) << "Parallel optimizer is applied between " << parameter->ToString() << " and " << cnode->ToString();
  1457. }
  1458. }
  1459. }
1460. // When this function returns a non-empty string, the parallel optimizer has been applied to this parameter.
  1461. std::string SetParallelShape(const AnfNodePtr &parameter, const std::pair<AnfNodePtr, int64_t> &res) {
  1462. MS_EXCEPTION_IF_NULL(parameter);
  1463. AbstractBasePtr abstract = parameter->abstract();
  1464. MS_EXCEPTION_IF_NULL(abstract);
  1465. MS_LOG(DEBUG) << "SetParallelShape " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
  1466. CNodePtr cnode = res.first->cast<CNodePtr>();
  1467. MS_EXCEPTION_IF_NULL(cnode);
  1468. OperatorInfoPtr distribute_operator = cnode->user_data<OperatorInfo>();
  1469. if (distribute_operator == nullptr) {
  1470. MS_LOG(EXCEPTION) << "Failure:node " << cnode->ToString() << " 's OperatorInfoPtr is nullptr";
  1471. }
  1472. if (LongToSize(res.second - 1) >= distribute_operator->inputs_tensor_info().size()) {
  1473. MS_LOG(EXCEPTION) << "The index is out of range, index is " << res.second - 1 << ", vector size is "
  1474. << distribute_operator->inputs_tensor_info().size();
  1475. }
  1476. TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[LongToSize(res.second - 1)];
  1477. TensorLayout tensor_layout = tensorinfo_in.tensor_layout();
  1478. Shape slice_shape = tensor_layout.slice_shape().array();
  1479. std::string opt_shard_group;
  1480. MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  1481. bool enable_parallel_optimizer = ParallelContext::GetInstance()->enable_parallel_optimizer();
  1482. if (enable_parallel_optimizer) {
  1483. if (!ParameterRequireGrad(parameter)) {
  1484. // only trainable parameters need parallel optimizer
1485. MS_LOG(INFO) << "Parallel optimizer: " << parameter->ToString() << " is not a trainable parameter.";
  1486. } else if (parameter->cast<ParameterPtr>()->param_info() &&
  1487. !parameter->cast<ParameterPtr>()->param_info()->parallel_optimizer()) {
  1488. MS_LOG(INFO) << "Parallel optimizer: " << parameter->ToString() << " does not need weight shard.";
  1489. } else if (tensor_layout.GenerateOptShardSliceShape() == Status::SUCCESS) {
1490. // get a fully sharded tensor slice shape if the weight is replicated across devices
1491. // and its first dimension is divisible
  1492. // apply parallel optimizer on parameters
  1493. // create communication group for allgather operator
  1494. slice_shape = tensor_layout.opt_shard_slice_shape();
  1495. std::vector<Group> dev_group;
  1496. if (distribute_operator->CreateGroupByTensorMap(tensor_layout.origin_tensor_map().array(), &dev_group) ==
  1497. Status::SUCCESS &&
  1498. !dev_group.empty()) {
  1499. opt_shard_group = dev_group[0].name();
  1500. // set communication group in tensor layout for checkpoint saving
  1501. tensor_layout.set_opt_shard_group(opt_shard_group);
  1502. MS_LOG(INFO) << "Parallel optimizer: create group " << opt_shard_group << " for " << parameter->ToString()
  1503. << " success.";
  1504. } else {
  1505. MS_LOG(WARNING) << "Parallel optimizer: create group for " << parameter->ToString() << " failed.";
  1506. }
  1507. } else {
  1508. MS_LOG(INFO) << "Parallel optimizer: " << parameter->ToString() << "'s shape does not satisfy the conditions.";
  1509. }
  1510. }
  1511. MS_LOG(INFO) << "SetParallelShape slice_shape " << parameter->ToString() << " shape "
  1512. << MakeValue(slice_shape)->ToString() << ", op name is " << distribute_operator->name();
  1513. std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
  1514. MS_EXCEPTION_IF_NULL(parallel_shape);
  1515. // Don't modify it in-place as the pointer of this AbstractValue may used as cache key in StaticAnalysis.
  1516. auto cloned_abstract = abstract->Clone();
  1517. MS_EXCEPTION_IF_NULL(cloned_abstract);
  1518. cloned_abstract->set_shape(parallel_shape);
  1519. parameter->set_abstract(cloned_abstract);
  1520. ParameterPtr parameter_ptr = parameter->cast<ParameterPtr>();
  1521. MS_EXCEPTION_IF_NULL(parameter_ptr);
  1522. parameter_ptr->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(tensor_layout));
  1523. return opt_shard_group;
  1524. }
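// Sizing example (sketch with assumed numbers): a [64, 128] parameter consumed by an
// operator whose input layout slices dim0 by 8 gets its abstract shape set to [8, 128].
// If the parallel optimizer also shards the weight, slice_shape instead comes from
// tensor_layout.opt_shard_slice_shape() and the created group name is returned.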
  1525. void CoverSliceShape(const FuncGraphPtr &root) {
  1526. MS_EXCEPTION_IF_NULL(root);
  1527. auto parameters = root->parameters();
  1528. for (auto &parameter : parameters) {
  1529. MS_EXCEPTION_IF_NULL(parameter->Shape());
  1530. auto iter = g_RefMap.find(parameter);
  1531. if (iter != g_RefMap.end()) {
  1532. std::string group = SetParallelShape(parameter, g_RefMap[parameter]);
  1533. // find all forward nodes that use parameter in graphs and insert allgather if group is not empty
  1534. ApplyParallelOptOnParam(root, parameter, group);
  1535. continue;
  1536. }
  1537. std::pair<AnfNodePtr, int64_t> res = FindSubGraph(root, parameter);
  1538. if (res.first == nullptr) {
1539. MS_LOG(INFO) << "Parameter " << parameter->ToString() << " doesn't need to set parallel shape";
  1540. } else {
  1541. std::string group = SetParallelShape(parameter, res);
  1542. // find all forward nodes that use parameter in graphs and insert allgather if group is not empty
  1543. ApplyParallelOptOnParam(root, parameter, group);
  1544. MS_LOG(DEBUG) << "Parameter " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
  1545. }
  1546. }
  1547. g_RefMap.clear();
  1548. }
  1549. void SetClonedTensorShapeForOptimizer(const FuncGraphPtr &root) {
  1550. MS_EXCEPTION_IF_NULL(root);
  1551. for (auto &cloned_parameter_node : root->parameters()) {
  1552. MS_EXCEPTION_IF_NULL(cloned_parameter_node);
  1553. auto cloned_parameter = cloned_parameter_node->cast<ParameterPtr>();
  1554. MS_EXCEPTION_IF_NULL(cloned_parameter);
  1555. if (!ParameterIsCloned(cloned_parameter_node)) {
  1556. continue;
  1557. }
  1558. auto param_value = cloned_parameter->param_info();
  1559. if (param_value == nullptr) {
  1560. continue;
  1561. }
  1562. // get the cloned index
  1563. int64_t cloned_index = param_value->cloned_index();
1564. // find the parameter this one was cloned from
  1565. bool found_be_cloned_parameter = false;
  1566. ParameterPtr cloned_from_parameter = nullptr;
  1567. AnfNodePtr cloned_from_node = nullptr;
  1568. for (auto &be_cloned_parameter_node : root->parameters()) {
  1569. MS_EXCEPTION_IF_NULL(be_cloned_parameter_node);
  1570. auto be_cloned_parameter = be_cloned_parameter_node->cast<ParameterPtr>();
  1571. MS_EXCEPTION_IF_NULL(be_cloned_parameter);
  1572. if (!be_cloned_parameter->has_default()) {
  1573. continue;
  1574. }
  1575. auto param_value_in = be_cloned_parameter->param_info();
  1576. if (param_value_in == nullptr) {
  1577. continue;
  1578. }
  1579. if (!param_value_in->be_cloned()) {
  1580. continue;
  1581. }
1582. // get the indices recorded on the source (cloned-from) parameter
  1583. auto &be_cloned_index = param_value_in->be_cloned_index();
  1584. if (std::find(be_cloned_index.begin(), be_cloned_index.end(), cloned_index) != be_cloned_index.end()) {
  1585. found_be_cloned_parameter = true;
  1586. cloned_from_parameter = be_cloned_parameter;
  1587. cloned_from_node = be_cloned_parameter_node;
  1588. }
  1589. }
  1590. if (found_be_cloned_parameter) {
  1591. // set the shape and tensor layout for cloned parameter
  1592. cloned_parameter->set_user_data<TensorLayout>(cloned_from_parameter->user_data<TensorLayout>());
  1593. MS_EXCEPTION_IF_NULL(cloned_parameter_node->abstract());
  1594. MS_EXCEPTION_IF_NULL(cloned_from_node->abstract());
  1595. auto cloned_abstract = cloned_parameter_node->abstract()->Clone();
  1596. MS_EXCEPTION_IF_NULL(cloned_abstract);
  1597. cloned_abstract->set_shape(cloned_from_node->abstract()->GetShapeTrack());
  1598. cloned_parameter_node->set_abstract(cloned_abstract);
1599. MS_LOG(INFO) << "The parameter: " << cloned_parameter->name()
1600. << " is cloned, the source parameter is: " << cloned_from_parameter->name()
1601. << ", clone index is: " << cloned_index;
  1602. } else {
1603. MS_LOG(EXCEPTION) << "The parameter: " << cloned_parameter->name() << " is cloned, cloned index is "
1604. << cloned_index << ", but the source parameter was not found";
  1605. }
  1606. }
  1607. }
  1608. void SetVirtualDatasetStrategy(const CNodePtr &node) {
  1609. MS_EXCEPTION_IF_NULL(node);
  1610. MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  1611. bool full_batch = ParallelContext::GetInstance()->full_batch();
  1612. PrimitivePtr prim = GetValueNode<PrimitivePtr>(node->input(0));
  1613. MS_EXCEPTION_IF_NULL(prim);
  1614. if (prim->name() == VIRTUAL_DATA_SET) {
  1615. CheckGlobalDeviceManager();
  1616. int64_t dev_num;
  1617. if (full_batch) {
  1618. dev_num = 1;
  1619. } else {
  1620. dev_num = SizeToLong(g_device_manager->stage_device_num());
  1621. }
  1622. auto attrs_temp = prim->attrs();
  1623. std::vector<Shapes> shape_list = ExtractShape(node);
  1624. if (shape_list.empty()) {
  1625. MS_LOG(EXCEPTION) << "Failure:node " << node->ToString() << " failed to extract shape";
  1626. }
  1627. std::vector<ValuePtr> elements;
  1628. for (size_t i = 0; i < shape_list[0].size(); i++) {
  1629. if (shape_list[0][i].empty()) {
  1630. MS_LOG(EXCEPTION) << "shape_list[ " << i << " ].size() is zero";
  1631. }
  1632. Dimensions input_strategy = {dev_num};
  1633. for (size_t j = 1; j < shape_list[0][i].size(); j++) {
  1634. input_strategy.push_back(1);
  1635. }
  1636. elements.push_back(MakeValue(input_strategy));
  1637. }
  1638. ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
  1639. attrs_temp[STRATEGY] = strategy;
  1640. (void)prim->SetAttrs(attrs_temp);
  1641. }
  1642. }
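// Strategy example (sketch): with 8 stage devices and full_batch == false, a
// VirtualDataset input of shape [32, 224, 224, 3] receives the strategy (8, 1, 1, 1):
// only the batch dimension is split across devices.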
  1643. // find previous parallel care node.
  1644. bool FindPreNodes(const AnfNodePtr &node, vector<std::string> *unique_ids) {
  1645. MS_EXCEPTION_IF_NULL(unique_ids);
1646. // if the previous node is a parameter, handle it outside this function.
  1647. if (node->isa<Parameter>()) {
  1648. return false;
  1649. }
  1650. if (!node->isa<CNode>()) {
  1651. return false;
  1652. }
  1653. CNodePtr cnode = node->cast<CNodePtr>();
  1654. if (!IsValueNode<Primitive>(cnode->input(0))) {
  1655. return false;
  1656. }
  1657. ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  1658. PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  1659. if (IsParallelCareNode(cnode) && prim->name() != MAKE_TUPLE && prim->name() != MAKE_LIST) {
  1660. unique_ids->push_back(cnode->UniqueId());
  1661. return true;
  1662. }
  1663. bool find = false;
  1664. for (size_t index = 0; index < cnode->inputs().size(); ++index) {
  1665. if (prim->name() == DEPEND && index != 1) {
  1666. continue;
  1667. }
  1668. if (FindPreNodes(cnode->inputs()[index], unique_ids)) {
  1669. find = true;
  1670. continue;
  1671. }
  1672. }
  1673. return find;
  1674. }
  1675. void FindLastNodesUniqueId(const std::vector<AnfNodePtr> &all_nodes, std::vector<std::string> *unique_ids) {
  1676. MS_EXCEPTION_IF_NULL(unique_ids);
  1677. for (auto &node : all_nodes) {
  1678. auto cnode = node->cast<CNodePtr>();
  1679. if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
  1680. continue;
  1681. }
  1682. ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  1683. PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
  1684. if (prim->name() == RETURN) {
  1685. if (!FindPreNodes(cnode, unique_ids)) {
  1686. MS_LOG(WARNING) << "cannot find the last parallel care node in eval graph";
  1687. }
  1688. }
  1689. }
  1690. }
  1691. StrategyPtr GenerateBatchParallelStrategy(const OperatorInfoPtr operator_, const PrimitivePtr prim) {
  1692. MS_EXCEPTION_IF_NULL(operator_);
  1693. MS_EXCEPTION_IF_NULL(prim);
  1694. StrategyPtr strategyPtr;
  1695. std::shared_ptr<Strategys> strategy_v_ptr = operator_->GenerateBatchStrategies();
  1696. MS_EXCEPTION_IF_NULL(strategy_v_ptr);
  1697. strategyPtr = NewStrategy(0, *strategy_v_ptr);
  1698. std::vector<ValuePtr> elements;
  1699. for (size_t i = 0; i < strategy_v_ptr->size(); i++) {
  1700. elements.push_back(MakeValue((*strategy_v_ptr)[i]));
  1701. }
  1702. ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
  1703. // display the strategy generated by batch parallel
  1704. auto attrs = prim->attrs();
  1705. attrs[GEN_STRATEGY] = strategy;
  1706. (void)prim->SetAttrs(attrs);
  1707. MS_LOG(INFO) << "prim " << prim->name() << " batch parallel strategy is " << attrs[GEN_STRATEGY]->ToString();
  1708. return strategyPtr;
  1709. }
  1710. void SetLastNodeStrategy(const StrategyPtr strategyPtr) {
  1711. auto strategys = strategyPtr->GetInputDim();
  1712. for (size_t i = 0; i < strategys.size(); ++i) {
  1713. for (size_t j = 0; j < strategys[i].size(); ++j) {
  1714. strategys[i][j] = 1;
  1715. }
  1716. }
  1717. strategyPtr->ResetInputs(strategys);
  1718. }
  1719. void ExtractInformation(const std::vector<AnfNodePtr> &all_nodes, bool is_training) {
  1720. // load strategy map from checkpoint
  1721. StrategyMap stra_map;
  1722. if (StrategyCheckpoint::GetInstance().LoadCheckPointOn()) {
  1723. if (StrategyCheckpoint::GetInstance().Load(&stra_map) != SUCCESS) {
  1724. MS_LOG(EXCEPTION) << "Load strategy checkpoint failed";
  1725. }
  1726. }
  1727. vector<std::string> last_forward_node_ids;
  1728. if (!is_training) {
  1729. FindLastNodesUniqueId(all_nodes, &last_forward_node_ids);
  1730. MS_LOG(INFO) << "there are " << last_forward_node_ids.size() << " output nodes in eval/predict";
  1731. }
  1732. for (auto &node : all_nodes) {
  1733. auto cnode = node->cast<CNodePtr>();
  1734. if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
  1735. continue;
  1736. }
  1737. SetVirtualDatasetStrategy(cnode);
  1738. ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  1739. PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
  1740. if (prim->name() == MAKE_TUPLE || prim->name() == MAKE_LIST || prim->name() == RECEIVE) {
  1741. continue;
  1742. }
  1743. auto attrs = prim->attrs();
  1744. MS_LOG(INFO) << "extract information: node: " << node->ToString() << " prim " << prim->name();
  1745. if (IsParallelCareNode(cnode)) {
  1746. std::vector<Shapes> shape_list = ExtractShape(cnode);
  1747. if (shape_list.empty()) {
  1748. MS_LOG(EXCEPTION) << "Failure:node " << node->ToString() << " failed to extract shape";
  1749. }
  1750. OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
  1751. if (operator_ == nullptr) {
  1752. MS_LOG(EXCEPTION) << "Failure:Primitive " << prim->name() << " OperatorInstance failed";
  1753. }
  1754. auto &inputs = cnode->inputs();
  1755. std::vector<ValuePtr> input_value;
  1756. for (size_t index = 1; index < inputs.size(); ++index) {
  1757. if (inputs[index]->isa<ValueNode>()) {
  1758. input_value.push_back(GetValueNode(inputs[index]));
  1759. } else {
  1760. input_value.emplace_back(nullptr);
  1761. }
  1762. }
  1763. StrategyPtr strategyPtr = nullptr;
  1764. (*operator_).set_input_value(input_value);
  1765. (*operator_).set_outputs_dtype(cnode->Type());
  1766. (*operator_).set_cnode(cnode);
  1767. if (prim->name() == RESHAPE) {
  1768. cnode->set_user_data<OperatorInfo>(operator_);
  1769. continue;
  1770. }
  1771. // load strategy checkpoint
  1772. // key of strategy map
  1773. std::string strategy_key_name = "";
  1774. auto param_names = NodeParameterName(cnode);
  1775. if (!param_names.empty()) {
  1776. strategy_key_name = prim->name() + "_" + param_names[0].first;
  1777. }
  1778. bool load_strategy_from_ckpt =
  1779. StrategyCheckpoint::GetInstance().LoadCheckPointOn() && stra_map.find(strategy_key_name) != stra_map.end();
  1780. bool is_last_nodes = std::find(last_forward_node_ids.begin(), last_forward_node_ids.end(), cnode->UniqueId()) !=
  1781. last_forward_node_ids.end();
  1782. bool full_batch = ParallelContext::GetInstance()->full_batch();
  1783. if ((is_last_nodes && !full_batch) || (!StrategyFound(attrs) && !load_strategy_from_ckpt)) {
  1784. MS_LOG(INFO) << "ExtractInformation: the strategy of node " << node->ToString() << " prim " << prim->name()
  1785. << " is empty, using batch parallel";
  1786. strategyPtr = GenerateBatchParallelStrategy(operator_, prim);
  1787. } else if (StrategyFound(attrs)) {
  1788. strategyPtr = ExtractStrategy(attrs);
  1789. } else {
  1790. strategyPtr = stra_map[strategy_key_name];
  1791. }
  1792. if (strategyPtr != nullptr) {
  1793. if (is_last_nodes && full_batch) {
  1794. SetLastNodeStrategy(strategyPtr);
  1795. }
  1796. if (operator_->Init(strategyPtr) == FAILED) {
  1797. MS_LOG(EXCEPTION) << "Failure:operator " << prim->name() << " init failed";
  1798. }
  1799. cnode->set_user_data<OperatorInfo>(operator_);
  1800. } else {
  1801. MS_LOG(EXCEPTION) << "ERROR:strategy_ptr is nullptr";
  1802. }
  1803. }
  1804. }
  1805. }
  1806. TensorLayout GetInputLayoutFromCNode(const std::pair<AnfNodePtr, int64_t> &node_pair) {
  1807. CNodePtr cnode = node_pair.first->cast<CNodePtr>();
  1808. MS_EXCEPTION_IF_NULL(cnode);
  1809. OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  1810. MS_EXCEPTION_IF_NULL(distribute_operator);
  1811. int64_t index = node_pair.second;
  1812. if (index > SizeToLong(distribute_operator->inputs_tensor_info().size())) {
1813. MS_LOG(EXCEPTION) << "The index is out of range, the index is " << index - 1 << ", the vector size is "
1814. << distribute_operator->inputs_tensor_info().size();
  1815. }
  1816. TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[LongToSize(index - 1)];
  1817. TensorLayout tensorlayout_in = tensorinfo_in.tensor_layout();
  1818. return tensorlayout_in;
  1819. }
1820. // if reshape's output connects to several primitives, return the first layout found
  1821. std::shared_ptr<TensorLayout> FindNextLayout(const CNodePtr &cnode) {
  1822. MS_EXCEPTION_IF_NULL(cnode);
  1823. MS_EXCEPTION_IF_NULL(cnode->func_graph());
  1824. FuncGraphManagerPtr manager = cnode->func_graph()->manager();
  1825. MS_EXCEPTION_IF_NULL(manager);
  1826. AnfNodeIndexSet node_set = manager->node_users()[cnode];
  1827. for (auto &node_pair : node_set) {
  1828. CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
  1829. if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
  1830. continue;
  1831. }
  1832. ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
  1833. MS_EXCEPTION_IF_NULL(prim_anf_node);
  1834. PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
  1835. MS_EXCEPTION_IF_NULL(node_prim);
  1836. MS_LOG(INFO) << "FindNextLayout prim " << node_prim->name();
  1837. if (node_prim->name() == DEPEND && node_pair.second != 1) {
  1838. continue;
  1839. }
  1840. if (IsParallelCareNode(use_apply) && use_apply->has_user_data<OperatorInfo>()) {
  1841. MS_LOG(INFO) << "FindNextLayout success prim " << node_prim->name();
  1842. auto layout = GetInputLayoutFromCNode(node_pair);
  1843. return std::make_shared<TensorLayout>(layout);
  1844. }
  1845. MS_LOG(DEBUG) << "FindNextLayout failed prim " << node_prim->name() << " " << IsParallelCareNode(use_apply)
  1846. << " " << use_apply->has_user_data<OperatorInfo>();
  1847. auto layout_ptr = FindNextLayout(use_apply);
  1848. if (layout_ptr) {
  1849. return layout_ptr;
  1850. }
  1851. }
1852. MS_LOG(WARNING) << "FindNextLayout returned nullptr; if reshape is not the last primitive, there must be some error";
  1853. return nullptr;
  1854. }
  1855. std::shared_ptr<TensorLayout> GetOutputLayoutFromCNode(const CNodePtr &cnode, size_t output_index) {
  1856. MS_EXCEPTION_IF_NULL(cnode);
  1857. OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  1858. MS_EXCEPTION_IF_NULL(distribute_operator);
1859. if (distribute_operator->outputs_tensor_info().size() <= output_index) {
1860. MS_LOG(EXCEPTION) << "outputs_tensor_info size is " << distribute_operator->outputs_tensor_info().size()
1861. << ", it must be greater than output_index " << output_index;
  1862. }
  1863. TensorInfo tensorinfo_out = distribute_operator->outputs_tensor_info()[output_index];
  1864. TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  1865. return std::make_shared<TensorLayout>(tensorlayout_out);
  1866. }
  1867. std::shared_ptr<TensorLayout> FindPrevParallelCareNodeLayout(const AnfNodePtr &node, size_t output_index) {
  1868. if (!node->isa<CNode>()) {
  1869. return nullptr;
  1870. }
  1871. CNodePtr cnode = node->cast<CNodePtr>();
  1872. if (!IsValueNode<Primitive>(cnode->input(0))) {
  1873. return nullptr;
  1874. }
  1875. if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
  1876. auto layout_ptr = GetOutputLayoutFromCNode(cnode, output_index);
  1877. if (!layout_ptr) {
  1878. MS_LOG(EXCEPTION) << "Failure:GetLayoutFromCNode failed";
  1879. }
  1880. return layout_ptr;
  1881. }
  1882. return nullptr;
  1883. }
  1884. std::shared_ptr<TensorLayout> FindParameterNextLayout(const AnfNodePtr &node) {
  1885. FuncGraphManagerPtr manager = node->func_graph()->manager();
  1886. MS_EXCEPTION_IF_NULL(manager);
  1887. AnfNodeIndexSet node_set = manager->node_users()[node];
  1888. for (auto &node_pair : node_set) {
  1889. CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
  1890. if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
  1891. continue;
  1892. }
  1893. ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
  1894. MS_EXCEPTION_IF_NULL(prim_anf_node);
  1895. PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
  1896. MS_EXCEPTION_IF_NULL(node_prim);
  1897. if ((node_prim->name() == DEPEND && node_pair.second != 1) || node_prim->name() == RESHAPE) {
  1898. continue;
  1899. }
  1900. if (IsParallelCareNode(use_apply) && use_apply->has_user_data<OperatorInfo>()) {
  1901. auto layout = GetInputLayoutFromCNode(node_pair);
  1902. return std::make_shared<TensorLayout>(layout);
  1903. }
  1904. }
  1905. return nullptr;
  1906. }
  1907. std::shared_ptr<TensorLayout> CreateParameterLayout(const AnfNodePtr &node) {
1908. // Create a DataParallel tensor layout for the parameter (supports WideDeep).
  1909. auto next_layout = FindParameterNextLayout(node);
  1910. if (next_layout != nullptr) {
  1911. return next_layout;
  1912. }
  1913. CheckGlobalDeviceManager();
  1914. int64_t dev_num = g_device_manager->stage_device_num();
  1915. TensorLayout input_tensor_layout;
  1916. // create input_shape
  1917. Shapes inputs_shape = GetNodeShape(node);
  1918. Shape input_shape_array = inputs_shape[0];
  1919. if (input_shape_array.empty()) {
1920. MS_LOG(EXCEPTION) << "Reshaping a scalar parameter is not supported.";
  1921. }
  1922. // create tensor_map
  1923. size_t shape_size = input_shape_array.size();
  1924. TensorMap input_tensor_map_array(SizeToLong(shape_size) - 1, -1);
  1925. input_tensor_map_array.insert(input_tensor_map_array.begin(), 0);
  1926. // create dev_matrix
  1927. Shape dev_matrix_array = {dev_num};
  1928. if (input_tensor_layout.InitFromVector(dev_matrix_array, input_tensor_map_array, input_shape_array) != SUCCESS) {
  1929. MS_LOG(EXCEPTION) << "Create tensor layout for parameter failed.";
  1930. }
  1931. return std::make_shared<TensorLayout>(input_tensor_layout);
  1932. }
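// Layout example (sketch with assumed numbers): a [64, 128] parameter on 8 stage
// devices gets the fallback data-parallel layout
//   dev_matrix = [8], tensor_map = [0, -1]
// i.e. dim0 is mapped to the device axis and dim1 is replicated.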
  1933. RedistributionOpListPtr InferSensRedistribution(const AnfNodePtr &node, const TensorLayout &loss_layout) {
  1934. MS_EXCEPTION_IF_NULL(node);
  1935. TensorRedistribution tensor_redistribution;
  1936. // create stand alone layout:TensorMap:[all -1],dev_matrix:[dev_num].
  1937. CheckGlobalDeviceManager();
  1938. int64_t dev_num = g_device_manager->stage_device_num();
  1939. TensorLayout stand_alone_layout;
  1940. Shapes inputs_shape = GetNodeShape(node);
  1941. if (inputs_shape.empty()) {
1942. MS_LOG(EXCEPTION) << "InferSensRedistribution failed because the inputs shape is empty.";
  1943. }
  1944. Shape input_shape_array = inputs_shape[0];
  1945. if (input_shape_array.empty()) {
1946. MS_LOG(INFO) << "No need to redistribute sens.";
  1947. return nullptr;
  1948. }
  1949. // TensorMap
  1950. TensorMap stand_alone_tensor_map_array(SizeToLong(input_shape_array.size()), -1);
  1951. // Dev_matrix
  1952. Shape dev_matrix_array = {dev_num};
  1953. if (stand_alone_layout.InitFromVector(dev_matrix_array, stand_alone_tensor_map_array, input_shape_array) == FAILED) {
  1954. MS_LOG(EXCEPTION) << "Create tensor layout for Sens failed.";
  1955. }
  1956. // Infer Redistribution op list for stand alone and loss layout.
  1957. RankList dev_list = g_device_manager->GetDeviceListInThisStage();
  1958. if (tensor_redistribution.Init(stand_alone_layout, loss_layout, dev_list) == FAILED) {
  1959. MS_LOG(EXCEPTION) << "Redistribution for Sens init failed.";
  1960. }
  1961. RedistributionOpListPtr sens_redistribution_list = tensor_redistribution.InferTensorRedistributionOperatorList();
  1962. MS_EXCEPTION_IF_NULL(sens_redistribution_list);
  1963. return sens_redistribution_list;
  1964. }
  1965. std::shared_ptr<TensorLayout> FindPrevLayout(const AnfNodePtr &node) {
  1966. if (node->isa<Parameter>()) {
  1967. return CreateParameterLayout(node);
  1968. }
  1969. if (!node->isa<CNode>()) {
  1970. return nullptr;
  1971. }
  1972. CNodePtr cnode = node->cast<CNodePtr>();
  1973. if (!IsValueNode<Primitive>(cnode->input(0))) {
  1974. return nullptr;
  1975. }
  1976. if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
  1977. auto layout_ptr = GetOutputLayoutFromCNode(cnode, 0);
  1978. if (!layout_ptr) {
  1979. MS_LOG(EXCEPTION) << "Failure:GetLayoutFromCNode failed";
  1980. }
  1981. return layout_ptr;
  1982. }
  1983. ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  1984. PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  1985. if (prim->name() == TUPLE_GETITEM) {
  1986. auto tuple_index = GetTupleGetItemIndex(cnode);
  1987. auto layout_ptr = FindPrevParallelCareNodeLayout(cnode->input(1), LongToSize(tuple_index));
  1988. if (!layout_ptr) {
1989. MS_LOG(EXCEPTION)
1990. << "Failure: FindPrevLayout failed, tuple_getitem comes before reshape, but there does not exist a parallel "
1991. "care node before tuple_getitem!";
  1992. }
  1993. return layout_ptr;
  1994. }
  1995. for (size_t index = 0; index < cnode->inputs().size(); ++index) {
  1996. if (prim->name() == DEPEND && index != 1) {
  1997. continue;
  1998. }
  1999. auto layout_ptr = FindPrevLayout(cnode->inputs()[index]);
  2000. if (!layout_ptr) {
  2001. continue;
  2002. }
  2003. return layout_ptr;
  2004. }
2005. MS_LOG(WARNING) << "FindPrevLayout returned nullptr; if reshape is not the first primitive, there must be some error";
  2006. return nullptr;
  2007. }
  2008. void ReshapeInit(const std::vector<AnfNodePtr> &all_nodes) {
  2009. for (auto &node : all_nodes) {
  2010. auto cnode = node->cast<CNodePtr>();
  2011. if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
  2012. continue;
  2013. }
  2014. ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  2015. if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>()) {
  2016. continue;
  2017. }
  2018. PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
  2019. MS_EXCEPTION_IF_NULL(prim);
  2020. OperatorInfoPtr operator_info = cnode->user_data<OperatorInfo>();
  2021. if (operator_info == nullptr) {
  2022. MS_LOG(EXCEPTION) << "Failure:Primitive " << prim->ToString() << " OperatorInstance is nullptr";
  2023. }
  2024. if (prim->name() != RESHAPE) {
  2025. continue;
  2026. }
  2027. auto attrs = prim->attrs();
  2028. if (StrategyFound(attrs)) {
2029. MS_LOG(EXCEPTION) << "Setting a strategy for Reshape has no effect!";
  2030. }
  2031. MS_ASSERT(cnode->inputs().size() == 3);
  2032. auto prev_layout_ptr = FindPrevLayout(cnode->input(1));
  2033. if (prev_layout_ptr) {
  2034. auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
  2035. reshape_info_ptr->SetInputLayout(*prev_layout_ptr);
  2036. }
  2037. auto next_layout_ptr = FindNextLayout(cnode);
  2038. if (next_layout_ptr) {
  2039. auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
  2040. reshape_info_ptr->SetOutputLayout(*next_layout_ptr);
  2041. }
  2042. if (operator_info->Init(nullptr) == FAILED) {
  2043. MS_LOG(EXCEPTION) << "Failure:operator " << prim->ToString() << " init failed";
  2044. }
  2045. }
  2046. }
  2047. CNodePtr HandleDependLoss(const CNodePtr &cnode) {
  2048. // Handle return->depend->loss
  2049. auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
  2050. MS_EXCEPTION_IF_NULL(prim);
  2051. if (prim->name() == DEPEND) {
  2052. auto depend_before = cnode->input(1)->cast<CNodePtr>();
  2053. MS_EXCEPTION_IF_NULL(depend_before);
  2054. return HandleDependLoss(depend_before);
  2055. }
  2056. return cnode;
  2057. }
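// Unwrapping example (sketch):
//   return -> Depend -> Depend -> loss_cnode
// HandleDependLoss follows input(1) through each Depend and returns loss_cnode.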
  2058. LossNodeInfo FindLossCNode(const FuncGraphPtr &func_graph) {
  2059. LossNodeInfo loss_node_info;
  2060. MS_EXCEPTION_IF_NULL(func_graph);
  2061. CNodePtr return_node = func_graph->get_return();
  2062. MS_EXCEPTION_IF_NULL(return_node);
  2063. if (return_node->size() < 2) {
  2064. MS_LOG(EXCEPTION) << "Failure: " << return_node->ToString() << " size is smaller than 2";
  2065. }
  2066. AnfNodePtr pre_node = return_node->input(1);
  2067. MS_EXCEPTION_IF_NULL(pre_node);
  2068. auto pre_cnode = pre_node->cast<CNodePtr>();
  2069. if (pre_cnode == nullptr || !IsValueNode<Primitive>(pre_cnode->input(0))) {
  2070. return loss_node_info;
  2071. }
  2072. if (!IsValueNode<Primitive>(pre_cnode->input(0))) {
  2073. MS_LOG(DEBUG) << "pre_cnode:" << pre_cnode->ToString();
  2074. return loss_node_info;
  2075. }
  2076. auto prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  2077. // return -> cast
  2078. if (prim->name() == CAST && !pre_cnode->has_user_data<OperatorInfo>()) {
  2079. pre_cnode = pre_cnode->input(1)->cast<CNodePtr>();
  2080. MS_EXCEPTION_IF_NULL(pre_cnode);
  2081. }
  2082. pre_cnode = HandleDependLoss(pre_cnode);
  2083. auto current_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
2084. // notice: the GetNext op has no input
  2085. if (INVALID_LOSS_OPS.find(current_prim->name()) != INVALID_LOSS_OPS.end()) {
  2086. MS_LOG(INFO) << "The loss is: " << current_prim->name();
  2087. loss_node_info.loss_node = pre_cnode;
  2088. return loss_node_info;
  2089. }
  2090. // size of common cnode is larger than 1
  2091. if (pre_cnode->size() < 2) {
  2092. MS_LOG(EXCEPTION) << pre_cnode->ToString() << " size( " << pre_cnode->inputs().size() << " ) is smaller than 2";
  2093. }
  2094. // return -> tuple_getitem -> loss
  2095. if (current_prim->name() == TUPLE_GETITEM) {
  2096. auto tuple_index = GetTupleGetItemIndex(pre_cnode);
  2097. AnfNodePtr pre_pre_node = pre_cnode->input(1);
  2098. MS_EXCEPTION_IF_NULL(pre_pre_node);
  2099. auto pre_pre_cnode = pre_pre_node->cast<CNodePtr>();
  2100. loss_node_info.has_tuple_getitem = true;
  2101. loss_node_info.dout_index = tuple_index;
  2102. loss_node_info.loss_node = pre_pre_cnode;
  2103. return loss_node_info;
  2104. }
  2105. // return -> make_tuple
  2106. if (current_prim->name() == MAKE_TUPLE) {
2107. MS_LOG(WARNING) << "The loss has a make_tuple, which is not supported";
  2108. return loss_node_info;
  2109. }
  2110. // return -> loss
  2111. loss_node_info.loss_node = pre_cnode;
  2112. MS_LOG(DEBUG) << "The loss name is " << current_prim->name();
  2113. return loss_node_info;
  2114. }
TensorLayouts GetLossNodeGradOutputLayout(const LossNodeInfo &node_info) {
  TensorLayouts ret;
  auto loss_cnode = node_info.loss_node;
  MS_EXCEPTION_IF_NULL(loss_cnode);
  ValueNodePtr prim_anf_node = loss_cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(prim_anf_node);
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(prim);
  if (INVALID_LOSS_OPS.find(prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(WARNING) << "The loss name is: " << prim->name() << ", do nothing for split sens now";
    return ret;
  }
  OperatorInfoPtr operator_info = loss_cnode->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(operator_info);
  TensorInfo loss_grad_tensor_info;
  size_t op_output_size = operator_info->outputs_tensor_info().size();
  MS_LOG(INFO) << "The loss name is " << operator_info->name() << ", has_tuple_getitem is "
               << node_info.has_tuple_getitem << ", the output size is " << op_output_size << ", the dout_index is "
               << node_info.dout_index;
  if ((op_output_size == 0) || (op_output_size <= LongToSize(node_info.dout_index))) {
    MS_LOG(EXCEPTION) << "The index is " << node_info.dout_index << ", but the size of outputs is " << op_output_size;
  }
  if (!node_info.has_tuple_getitem && (op_output_size > 1)) {
    MS_LOG(EXCEPTION) << "Currently, it is not supported that the sens is a tuple.";
  }
  loss_grad_tensor_info = operator_info->outputs_tensor_info()[LongToSize(node_info.dout_index)];
  ret.push_back(loss_grad_tensor_info.tensor_layout());
  return ret;
}

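// Split the sens (gradient seed) tensor according to the loss output layout:
// a scalar-like sens only records the layout, a Parameter sens gets a sliced
// abstract shape, a CNode sens is redistributed, and a Tensor value is split
// with the _GetTensorSlice operator.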
void SplitSens(const CNodePtr &grad_sens_node, const TensorLayout &loss_grad_layout) {
  MS_EXCEPTION_IF_NULL(grad_sens_node);
  if (grad_sens_node->size() <= 1) {
    MS_LOG(EXCEPTION) << "The size of grad sens node is smaller than 2";
  }
  AnfNodePtr sens_tensor_node = grad_sens_node->input(1);
  MS_EXCEPTION_IF_NULL(sens_tensor_node);
  Shapes sens_shapes = GetNodeShape(sens_tensor_node);
  if (sens_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "GetNodeShape for sens_tensor_node, output size is not 1";
  }
  // If the shape of sens tensor is [] or [1], no need to split it.
  Shape sens_shape = sens_shapes[0];
  if (sens_shape.empty() || ((sens_shape.size() == 1) && (sens_shape[0] == 1))) {
    if (sens_tensor_node->isa<Parameter>()) {
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      sens_tensor_param->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(loss_grad_layout));
    }
    MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", no need to split sens";
    return;
  }
  auto loss_shape = loss_grad_layout.tensor_shape().array();
  if (loss_shape != sens_shape) {
    MS_LOG(EXCEPTION) << "The shape of sens is not equal to loss output, it is unsupported now. Sens shape is "
                      << ShapeToString(sens_shape) << ", loss shape is " << ShapeToString(loss_shape);
  }
  MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", split it.";
  if (!IsValueNode<Tensor>(sens_tensor_node)) {
    if (sens_tensor_node->isa<Parameter>()) {
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      AbstractBasePtr abstract = sens_tensor_node->abstract();
      MS_EXCEPTION_IF_NULL(abstract);
      auto slice_shape = loss_grad_layout.slice_shape().array();
      std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
      MS_EXCEPTION_IF_NULL(parallel_shape);
      auto cloned_abstract = abstract->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(parallel_shape);
      sens_tensor_node->set_abstract(cloned_abstract);
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      sens_tensor_param->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(loss_grad_layout));
      return;
    }
    if (sens_tensor_node->isa<CNode>()) {
      auto op_list_ptr = InferSensRedistribution(sens_tensor_node, loss_grad_layout);
      if (op_list_ptr == nullptr) {
        return;
      }
      auto sens_tensor_cnode = sens_tensor_node->cast<CNodePtr>();
      auto func_graph = grad_sens_node->func_graph();
      MS_EXCEPTION_IF_NULL(func_graph);
      InsertRedistribution(op_list_ptr, grad_sens_node, func_graph, 1, sens_tensor_cnode);
      return;
    }
    MS_LOG(EXCEPTION) << "The type of sens node is not Tensor or Parameter or CNode, it is unsupported now.";
  }
  // Use _GetTensorSlice operator to split the sens tensor
  FuncGraphPtr func_graph = grad_sens_node->func_graph();  // only cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(loss_grad_layout);
  InsertGetTensorSliceOp(op, grad_sens_node, func_graph, 1, SPLIT_SENS);
}

void InsertForwardOps(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorVector forward_op = distribute_operator->forward_op();
  if (!forward_op.empty()) {
    MS_LOG(INFO) << "Insert forward op for " << distribute_operator->name();
    ForwardCommunication(forward_op, cnode);
  }
}

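// Apply the operator's replacement: replace_op rewrites the cnode in place,
// while replace_graph substitutes a whole subgraph; the two are mutually
// exclusive.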
void StepReplace(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  // StepReplaceOp
  OperatorVector replace_op = distribute_operator->replace_op();
  if (!replace_op.empty()) {
    MS_LOG(INFO) << "StepReplaceOp " << cnode->ToString();
    StepReplaceOp(replace_op, cnode);
  }
  // StepReplaceGraph: after calling StepReplaceGraph, cnode can not be used anymore.
  ReplaceGraphPtr replace_graph = distribute_operator->replace_graph(cnode);
  if (!replace_op.empty() && replace_graph) {
    MS_LOG(EXCEPTION) << "Only one of replace_op or replace_graph can be used";
  }
  if (replace_graph) {
    MS_LOG(INFO) << "StepReplaceGraph " << cnode->ToString();
    StepReplaceGraph(replace_graph, cnode);
  }
}

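// For a DropoutDoMask operator, fetch the replacement for its paired
// DropoutGenMask input and rewrite that input in place.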
void HandleDropoutNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  std::string op_name = distribute_operator->name();
  if (op_name.find(DROPOUT_DO_MASK) == std::string::npos) {
    return;
  }
  DropoutDoMaskInfoPtr dropout_do_mask = std::dynamic_pointer_cast<DropoutDoMaskInfo>(distribute_operator);
  MS_EXCEPTION_IF_NULL(dropout_do_mask);
  std::vector<Operator> replace_op = dropout_do_mask->GetDropoutGenMaskReplaceOp(cnode);
  if (replace_op.empty()) {
    MS_LOG(DEBUG) << "No need to replace dropout_gen_mask";
    return;
  }
  if (cnode->inputs().size() != DROPOUT_DO_MASK_CNODE_INPUT_SIZE) {
    MS_LOG(EXCEPTION) << "The size of the dropout do mask cnode's inputs is not " << DROPOUT_DO_MASK_CNODE_INPUT_SIZE;
  }
  ReplaceOneOp(replace_op[0], cnode->input(DROPOUT_GEN_MASK_INDEX)->cast<CNodePtr>());
}

void HandleTileNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  if (cnode->size() < 3 || !IsValueNode<Primitive>(cnode->input(0))) {
    return;
  }
  auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
  if (prim->name() != TILE) {
    return;
  }
  TileInfoPtr tile = std::dynamic_pointer_cast<TileInfo>(distribute_operator);
  MS_EXCEPTION_IF_NULL(tile);
  tile->UpdateMultiples(cnode);
}

void HandleSpecialNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  HandleDropoutNode(distribute_operator, cnode);
  HandleTileNode(distribute_operator, cnode);
}

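// Find the forward graphs: for each J(graph) cnode in the root, collect the
// referenced graph and every func graph it uses transitively.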
std::set<FuncGraphPtr> FindForwardGraphByRootNodes(const AnfNodeSet &root_all_nodes) {
  // J->CNode->Graph
  std::set<FuncGraphPtr> graph_set;
  for (auto &node : root_all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if ((cnode->size() < 2) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    auto expect_j_prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    if (expect_j_prim->name() != J) {
      continue;
    }
    if (IsValueNode<FuncGraph>(cnode->input(1))) {
      auto graph = GetValueNode<FuncGraphPtr>(cnode->input(1));
      MS_LOG(DEBUG) << "Find the forward graph success";
      graph_set.insert(graph);
      auto manager = graph->manager();
      MS_EXCEPTION_IF_NULL(manager);
      auto graph_used = manager->func_graphs_used_total(graph);
      for (auto &sub_graph : graph_used) {
        graph_set.insert(sub_graph);
      }
    }
  }
  return graph_set;
}

void StepSplitSens(const std::pair<CNodePtr, LossNodeInfo> &sens_loss_pair) {
  CNodePtr sens_node = sens_loss_pair.first;
  auto loss_node = sens_loss_pair.second;
  auto loss_grad_layout = GetLossNodeGradOutputLayout(loss_node);
  if (!loss_grad_layout.empty()) {
    SplitSens(sens_node, loss_grad_layout[0]);
  }
}

// Sens node satisfies the following conditions: cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
std::vector<std::pair<CNodePtr, LossNodeInfo>> GetSensLossPairs(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  std::vector<std::pair<CNodePtr, LossNodeInfo>> sens_loss_pairs;
  for (auto &node : root->nodes()) {
    if (!node->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)
    auto sens_cnode = node->cast<CNodePtr>();
    AnfNodePtr expect_tuple_getitem = sens_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_tuple_getitem);
    if (!expect_tuple_getitem->isa<CNode>()) {
      continue;
    }
    auto expect_tuple_getitem_cnode = expect_tuple_getitem->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_tuple_getitem_cnode, TUPLE_GETITEM)) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode
    AnfNodePtr expect_anonymous = expect_tuple_getitem_cnode->input(1);
    MS_EXCEPTION_IF_NULL(expect_anonymous);
    if (!expect_anonymous->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
    auto expect_anonymous_cnode = expect_anonymous->cast<CNodePtr>();
    AnfNodePtr expect_j = expect_anonymous_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_j);
    if (!expect_j->isa<CNode>()) {
      continue;
    }
    auto expect_j_cnode = expect_j->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_j_cnode, J)) {
      continue;
    }
    if (!IsValueNode<FuncGraph>(expect_j_cnode->input(1))) {
      MS_LOG(EXCEPTION) << "Sens can't find the corresponding graph.";
    }
    auto func_graph = GetValueNode<FuncGraphPtr>(expect_j_cnode->input(1));
    auto loss_node_info = FindLossCNode(func_graph);
    if (loss_node_info.loss_node == nullptr) {
      MS_LOG(WARNING) << "Can not find the loss cnode";
      continue;
    }
    std::pair<CNodePtr, LossNodeInfo> sens_loss_pair = std::make_pair(sens_cnode, loss_node_info);
    sens_loss_pairs.push_back(sens_loss_pair);
  }
  return sens_loss_pairs;
}

bool IsLastStage() {
  MS_EXCEPTION_IF_NULL(g_device_manager);
  auto stage_num = g_device_manager->stage_num();
  auto stage_id = g_device_manager->stage_id();
  return ((stage_num - 1) == stage_id);
}

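// The main communication-insertion pass: split sens on the last stage, insert
// forward/redistribution/backward ops for every parallel care cnode, split
// value-node tensors, and finally apply the operator replacements.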
void ParallelCommunication(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes,
                           const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(manager);
  TensorRedistribution tensor_redistribution;
  std::vector<std::pair<CNodePtr, LossNodeInfo>> sens_loss_pairs = GetSensLossPairs(root);
  bool has_backward = !sens_loss_pairs.empty();
  // splitting sens must happen before inserting the operators.
  for (auto &pair : sens_loss_pairs) {
    // If the shape of the grad-sens tensor is not [] or [1], use get tensor slice to handle it.
    // If the type of the sens node is not Tensor, it is unsupported now; do nothing by default.
    if (IsLastStage()) {
      StepSplitSens(pair);
    }
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      // the make_tuple is a parallel care node, but it may not have operator info
      if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>()) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      MS_EXCEPTION_IF_NULL(distribute_operator);
      // insert forward ops
      if (!IsSomePrimitive(cnode, RECEIVE)) {
        InsertForwardOps(distribute_operator, cnode);
      }
      // insert redistribution ops
      StepRedistribution(cnode, distribute_operator, cnode, tensor_redistribution, cnode);
      // insert backward ops
      if (has_backward && !IsSomePrimitive(cnode, RECEIVE)) {
        BackwardCommunication(root, distribute_operator, cnode, sens_loss_pairs);
      }
      HandleSpecialNode(distribute_operator, cnode);
    } else if (IsValueNode<Tensor>(node) || IsValueNode<ValueList>(node) || IsValueNode<ValueTuple>(node)) {
      StepSplitTensor(node, manager);
    }
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>() || IsSomePrimitive(cnode, RECEIVE)) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      MS_EXCEPTION_IF_NULL(distribute_operator);
      // StepReplace
      StepReplace(distribute_operator, cnode);
    }
  }
}

namespace {
void RevertSymbolicKeyInstance(const FuncGraphPtr &root, const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(node);
  auto symbolic_key = GetValueNode<SymbolicKeyInstancePtr>(node);
  MS_EXCEPTION_IF_NULL(symbolic_key);
  auto all_upstream_node = root->manager()->node_users()[node];
  for (auto &upstream_node : all_upstream_node) {
    FuncGraphPtr fg = upstream_node.first->func_graph();
    if (symbolic_key->node()->isa<Parameter>()) {
      for (auto &param : root->parameters()) {
        if (*param == *symbolic_key->node()) {
          AnfNodePtr reverted_node = root->NewCNode({NewValueNode(prim::kPrimEmbed), param});
          MS_EXCEPTION_IF_NULL(reverted_node);
          MS_LOG(DEBUG) << "before replace " << node->ToString() << " to node " << reverted_node->DebugString();
          (void)fg->manager()->Replace(node, reverted_node);
          MS_LOG(DEBUG) << "revert node " << node->ToString() << " to node " << reverted_node->DebugString();
        }
      }
    }
  }
}
}  // namespace

void HandleSymbolicKeyInstance(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &node : all_nodes) {
    // revert back SymbolicKeyInstance to embed() primitive
    if (IsValueNode<SymbolicKeyInstance>(node)) {
      RevertSymbolicKeyInstance(root, node);
      continue;
    }
  }
}

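// Collect (name, input index) pairs for this cnode's trainable parameter
// inputs, looking through a Cast to find a parameter behind it.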
std::vector<std::pair<std::string, int64_t>> NodeParameterName(const CNodePtr &node) {
  std::vector<AnfNodePtr> node_inputs{node->inputs()};
  std::vector<std::pair<std::string, int64_t>> param_names;
  for (int64_t i = 0; i < UlongToLong(node_inputs.size()); ++i) {
    auto input = node_inputs[i];
    if (input->isa<Parameter>()) {
      auto input_parameter = input->cast<ParameterPtr>();
      if (input_parameter->has_default() && ParameterRequireGrad(input_parameter)) {
        param_names.push_back({input_parameter->name(), i});
      }
    } else if (input->isa<CNode>()) {
      CNodePtr cnode = input->cast<CNodePtr>();
      if (!IsValueNode<Primitive>(cnode->input(0))) {
        return param_names;
      }
      ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
      PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
      // need at least two inputs (primitive + data) before indexing inputs()[1]
      if (prim->name() == CAST && cnode->inputs().size() >= 2) {
        auto cast_input = cnode->inputs()[1];
        if (cast_input->isa<Parameter>()) {
          auto cast_input_parameter = cast_input->cast<ParameterPtr>();
          if (cast_input_parameter->has_default() && ParameterRequireGrad(cast_input_parameter)) {
            param_names.push_back({cast_input_parameter->name(), i});
          }
        }
      }
    }
  }
  return param_names;
}

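// Build the strategy checkpoint: each operator's strategy keyed by primitive
// name plus first parameter name, the tensor info per parameter, and the
// manual split shapes of EmbeddingLookup/GatherV2 operators, then save them.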
void CheckpointStrategy(const std::vector<AnfNodePtr> &all_nodes) {
  StrategyMap stra_map;
  TensorInfoMap tensor_info_map;
  ManualShapeMap manual_shape_map;
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    auto param_names = NodeParameterName(cnode);
    if (param_names.empty()) {
      continue;
    }
    string param_name = param_names[0].first;
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->user_data<OperatorInfo>();
    if (operator_info) {
      if (operator_info->name().find(RESHAPEINFO) != std::string::npos) {
        continue;
      }
      std::vector<TensorInfo> input_tensor_info = operator_info->inputs_tensor_info();
      std::string strategy_key_name = prim->name() + "_" + param_name;
      stra_map[strategy_key_name] = operator_info->strategy();
      for (auto param_name_pair : param_names) {
        if (param_name_pair.second - 1 >= UlongToLong(input_tensor_info.size())) {
          continue;
        }
        tensor_info_map[param_name_pair.first] = input_tensor_info[param_name_pair.second - 1];
      }
      if (operator_info->name().find(EMBEDDING_LOOKUP) != std::string::npos ||
          operator_info->name().find(GATHERV2) != std::string::npos) {
        auto gatherv2_info = std::dynamic_pointer_cast<GatherV2PInfo>(operator_info);
        auto param_split_shapes = gatherv2_info->param_split_shapes();
        auto index_offsets = gatherv2_info->index_offsets();
        if (param_split_shapes.size() != index_offsets.size()) {
          MS_LOG(EXCEPTION) << "In manual split, the param_split_shapes and index_offsets length should be same.";
        }
        std::vector<std::pair<int64_t, int64_t>> manual_shape;
        for (int64_t i = 0; i < UlongToLong(param_split_shapes.size()); ++i) {
          manual_shape.push_back({param_split_shapes[i], index_offsets[i]});
        }
        manual_shape_map[param_name] = manual_shape;
      }
    }
  }
  if (StrategyCheckpoint::GetInstance().Save(stra_map, tensor_info_map, &manual_shape_map) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Save strategy checkpoint failed";
  }
}

void SetForwardFlag(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    MS_LOG(DEBUG) << "Set forward flag " << cnode->DebugString() << ".";
    cnode->set_in_forward_flag(true);
  }
}

void SetForwardFlag(const AnfNodeSet &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    cnode->set_in_forward_flag(true);
  }
}

std::set<FuncGraphPtr> ForwardGraph(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  const auto &all_nodes = root->nodes();
  std::set<FuncGraphPtr> graph_set = FindForwardGraphByRootNodes(all_nodes);
  return graph_set;
}

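// Locate the loss cnode in the root graph by UniqueIdThroughCopy and collect
// the forward nodes reachable from it via a deep linked-graph search.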
std::vector<AnfNodePtr> FindRootForwardCNode(const FuncGraphPtr &graph, const AnfNodeSet &all_nodes) {
  MS_EXCEPTION_IF_NULL(graph);
  std::vector<AnfNodePtr> root_forward_nodes;
  auto loss_cnode = FindLossCNode(graph).loss_node;
  if (loss_cnode == nullptr) {
    MS_LOG(WARNING) << "Can not find the loss cnode";
    return root_forward_nodes;
  }
  auto loss_cnode_id = loss_cnode->UniqueIdThroughCopy();
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    auto root_node_id = node->UniqueIdThroughCopy();
    if (loss_cnode_id == root_node_id) {
      root_forward_nodes = DeepLinkedGraphSearch(cnode);
      break;
    }
  }
  return root_forward_nodes;
}

void InsertShapeOp(const CNodePtr &node, const AnfNodePtr &pre_node, const FuncGraphPtr &root) {
  // shape op doesn't have params and attrs.
  OperatorParams params;
  OperatorAttrs attrs;
  auto shape_value = GetValueNode(node->input(2))->cast<ValueSequeuePtr>();
  MS_EXCEPTION_IF_NULL(shape_value);
  auto shape = shape_value->value();
  if (shape.empty()) {
    return;
  }
  OperatorArgs args = std::make_pair(attrs, params);
  Operator op = std::make_pair(SHAPE_OP, args);
  InsertNode(op, node, 2, pre_node, root, "shape");
}

static AnfNodePtr FindGrad(const CNodePtr &cnode) {
  for (auto &node : cnode->inputs()) {
    if (!node->isa<CNode>()) {
      continue;
    }
    if (!IsPrimitiveCNode(node, prim::kPrimEnvGetItem)) {
      return FindGrad(node->cast<CNodePtr>());
    } else {
      return node;
    }
  }
  return nullptr;
}

void HandleRootReshapeAndSaveStrategy(const std::vector<AnfNodePtr> &all_nodes) {
  // If the root graph has a reshape op, find the corresponding parameter:
  // Reshape's shape is the shape of the parameter.
  auto executor = pipeline::ExecutorPy::GetInstance();
  for (auto &node : all_nodes) {
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (cnode == nullptr || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    if (cnode->in_forward_flag()) {
      // Save strategy in executor
      OperatorInfoPtr op_info = cnode->user_data<OperatorInfo>();
      if (op_info) {
        auto stra_ptr = op_info->strategy();
        if (stra_ptr) {
          auto strategy = stra_ptr->GetInputDim();
          // the fullname with scope should be found in the step parallel end ir
          executor->SetCNodeStrategy(cnode->fullname_with_scope(), strategy);
        }
      }
      continue;
    }
    auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    if (prim->name() != RESHAPE) {
      continue;
    }
    auto root = node->func_graph();
    auto grad_node = FindGrad(cnode);
    if (grad_node) {
      InsertShapeOp(cnode, grad_node, root);
    }
  }
}

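// Mark the forward cnodes that the parallel pass cares about; if no forward
// graph is found under a J node, mark all ops in the root graph instead.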
void MarkForwardCNode(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto all_nodes = root->nodes();
  auto graph_set = FindForwardGraphByRootNodes(all_nodes);
  if (graph_set.empty()) {
    MS_LOG(INFO) << "Can not find the forward graph, so mark the ops in root graph";
    SetForwardFlag(all_nodes);
  } else {
    for (auto &func_graph : graph_set) {
      MS_LOG(INFO) << "The sub graph size of root is " << root->func_graphs_used().size();
      auto return_node = func_graph->get_return();
      MS_EXCEPTION_IF_NULL(return_node);
      auto all_dfs_nodes = DeepLinkedGraphSearch(return_node);
      SetForwardFlag(all_dfs_nodes);
      auto root_forward_nodes = FindRootForwardCNode(func_graph, all_nodes);
      if (root_forward_nodes.empty()) {
        continue;
      }
      // Mark forward flag for the nodes in root graph.
      SetForwardFlag(root_forward_nodes);
    }
  }
}

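// Resolve the communication backend and world group from the device target,
// fetch device num and global rank from the communication interface when they
// are not set, validate them against the pipeline stage split, and init the
// device manager.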
Status ParallelInit() {
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  int64_t device_num = ParallelContext::GetInstance()->device_num();
  int64_t global_rank = ParallelContext::GetInstance()->global_rank();
  int32_t split_stage_num = ParallelContext::GetInstance()->pipeline_stage_split_num();
  std::string parallel_mode = ParallelContext::GetInstance()->parallel_mode();
  auto ms_context = MsContext::GetInstance();
  MS_EXCEPTION_IF_NULL(ms_context);
  std::string backend = ms_context->get_param<std::string>(MS_CTX_DEVICE_TARGET);
  std::string world_group;
  std::string communication_backend;
  if (backend == kAscendDevice || backend == kDavinciDevice) {
    world_group = HCCL_WORLD_GROUP;
    communication_backend = HCCL_BACKEND;
  } else if (backend == kGPUDevice) {
    world_group = NCCL_WORLD_GROUP;
    communication_backend = NCCL_BACKEND;
  } else {
    MS_LOG(ERROR) << "Invalid communication backend: " << backend;
    return FAILED;
  }
  if (split_stage_num <= 0) {
    MS_LOG(ERROR) << "Invalid stage num " << split_stage_num << ", expected a positive stage number";
    return FAILED;
  }
  uint32_t world_rank_size = 0;
  if (!ParallelContext::GetInstance()->device_num_is_set()) {
    if (!CommManager::GetInstance().GetRankSize(world_group, &world_rank_size)) {
      MS_LOG(EXCEPTION) << "Get rank size failed";
    }
    device_num = UintToInt(world_rank_size);
    MS_LOG(INFO) << "Get device num from the communication interface, the device num is " << device_num;
  }
  uint32_t rank_id = 0;
  if (!ParallelContext::GetInstance()->global_rank_is_set()) {
    if (!CommManager::GetInstance().GetRankID(world_group, &rank_id)) {
      MS_LOG(EXCEPTION) << "Get rank id failed";
    }
    global_rank = UintToInt(rank_id);
    MS_LOG(INFO) << "Get global rank from the communication interface, the global rank is " << global_rank;
  }
  if ((device_num <= 0) || (device_num > MAX_DEVICE_NUM)) {
    MS_LOG(ERROR) << "Invalid device num " << device_num;
    return FAILED;
  }
  // the device_num may be obtained from the communication interface
  if (device_num % split_stage_num != 0) {
    MS_LOG(ERROR) << "Device num " << device_num << " can't be divided by stage num " << split_stage_num;
    return FAILED;
  }
  if ((global_rank < 0) || (global_rank >= device_num)) {
    MS_LOG(ERROR) << "Global rank " << global_rank << " is out of range, the device num is " << device_num;
    return FAILED;
  }
  std::vector<int64_t> stages;
  for (int i = 0; i < split_stage_num; i++) {
    stages.push_back(device_num / split_stage_num);
  }
  if ((split_stage_num > 1) && (parallel_mode != SEMI_AUTO_PARALLEL)) {
    MS_LOG(ERROR) << "To enable the pipeline parallel, please set the parallel mode to " << SEMI_AUTO_PARALLEL;
    return FAILED;
  }
  if (!InitDevice(device_num, global_rank, communication_backend, stages)) {
    MS_LOG(ERROR) << "Init device failed";
    return FAILED;
  }
  MS_LOG(INFO) << "The parallel context: dev num: " << device_num << ", global rank: " << global_rank
               << ", backend: " << backend << ", gradients_mean: " << ParallelContext::GetInstance()->gradients_mean()
               << ", gradient_fp32_sync: " << ParallelContext::GetInstance()->gradient_fp32_sync();
  return SUCCESS;
}

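// Propagate the operator info of the single parallel-care user onto forward
// MakeTuple/MakeList nodes.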
void HandleForwardMakeTupleAndMakeList(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    if (!AnfNodeIsPrimitive(node, MAKE_TUPLE) && !AnfNodeIsPrimitive(node, MAKE_LIST)) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!cnode->in_forward_flag()) {
      continue;
    }
    FuncGraphManagerPtr manager = cnode->func_graph()->manager();
    MS_EXCEPTION_IF_NULL(manager);
    std::string op_type = AnfNodeIsPrimitive(node, MAKE_TUPLE) ? MAKE_TUPLE : MAKE_LIST;
    auto make_tuple_list_user = manager->node_users()[cnode];
    if (make_tuple_list_user.size() != 1) {
      MS_LOG(EXCEPTION) << "The " << op_type << " must have exactly 1 user, but got " << make_tuple_list_user.size();
    }
    CNodePtr make_tuple_list_next_cnode = make_tuple_list_user.pop().first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(make_tuple_list_next_cnode);
    std::string make_tuple_list_user_prim_name = GetPrimName(make_tuple_list_next_cnode);
    if (!IsParallelCareNode(make_tuple_list_next_cnode)) {
      MS_LOG(INFO) << "The " << op_type << "'s user is " << make_tuple_list_user_prim_name
                   << ", no need to set operator info";
      continue;
    }
    if (make_tuple_list_next_cnode->inputs().size() != 2) {
      MS_LOG(EXCEPTION) << "The " << op_type << "'s user only supports 1 input, but got "
                        << make_tuple_list_next_cnode->inputs().size() - 1;
    }
    MS_LOG(INFO) << "Set the " << op_type << "'s operator info, and the op name is " << make_tuple_list_user_prim_name;
    OperatorInfoPtr op_info = GetDistributeOperator(make_tuple_list_next_cnode);
    MS_EXCEPTION_IF_NULL(op_info);
    cnode->set_user_data<OperatorInfo>(op_info);
  }
}

RefKeyPair CNodeWithRefKeys(const AnfNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  std::vector<AnfNodePtr> refkeys;
  if (cnode->isa<CNode>()) {
    auto cnode_ptr = cnode->cast<CNodePtr>();
    auto inputs = cnode_ptr->inputs();
    for (auto &one_input : inputs) {
      if (IsValueNode<RefKey>(one_input)) {
        refkeys.push_back(one_input);
      }
    }
    if (!refkeys.empty()) {
      return std::make_pair(cnode, refkeys);
    }
  }
  return {nullptr, refkeys};
}

ParameterUsersInfo FindParameterNodeUsers(const AnfNodePtr &node, bool (*IsCareNode)(const CNodePtr &)) {
  // In this case, node is a Parameter
  ParameterUsersInfo parameter_user_info;
  MS_EXCEPTION_IF_NULL(node->func_graph());
  MS_EXCEPTION_IF_NULL(node->func_graph()->manager());
  auto candidate_set = node->func_graph()->manager()->node_users()[node];
  for (auto &candidate : candidate_set) {
    auto candidate_node = candidate.first;
    auto c = candidate_node->cast<CNodePtr>();
    if (c == nullptr || !c->has_user_data<OperatorInfo>() || IsSomePrimitive(c, RECEIVE)) {
      continue;
    }
    (void)parameter_user_info.second.second.insert(candidate);
  }
  parameter_user_info.first = node->cast<ParameterPtr>()->name();
  parameter_user_info.second.first = node;
  return parameter_user_info;
}

ParameterUsersInfo FindRefKeyNodeUsers(const RefKeyPair &ref_key_pair, bool (*IsCareNode)(const CNodePtr &)) {
  // Dealing with the RefKey case
  ParameterUsersInfo parameter_user_info;
  auto refkeys = ref_key_pair.second;
  auto cnode = ref_key_pair.first;
  auto cnode_ptr = cnode->cast<CNodePtr>();
  if ((cnode_ptr == nullptr) || !IsValueNode<Primitive>(cnode_ptr->input(0)) || !IsCareNode(cnode_ptr)) {
    return parameter_user_info;
  }
  if (refkeys.size() > 1) {
    MS_LOG(EXCEPTION) << "CNode: " << cnode->fullname_with_scope() << "'s inputs have more than 1 RefKeys";
  }
  MS_EXCEPTION_IF_NULL(cnode->func_graph());
  auto cnode_func_graph = cnode->func_graph();
  MS_EXCEPTION_IF_NULL(cnode->func_graph()->manager());
  // Find the RefKey being used
  auto candidate_set_by_refkey = cnode_func_graph->manager()->node_users()[refkeys[0]];
  for (auto &candidate : candidate_set_by_refkey) {
    auto candidate_node = candidate.first;
    auto c = candidate_node->cast<CNodePtr>();
    if ((c == nullptr) || !IsValueNode<Primitive>(c->input(0)) || !IsCareNode(c)) {
      continue;
    }
    parameter_user_info.second.second.add(candidate);
  }
  // Find the corresponding Parameter being used
  std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(refkeys[0], cnode_func_graph);
  if (parameters.size() != 1) {
    MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
  }
  parameter_user_info.first = parameters[0]->cast<ParameterPtr>()->name();
  parameter_user_info.second.first = parameters[0];
  auto candidate_set_by_para = cnode_func_graph->manager()->node_users()[parameters[0]];
  for (auto &candidate : candidate_set_by_para) {
    auto candidate_node = candidate.first;
    auto c = candidate_node->cast<CNodePtr>();
    if ((c == nullptr) || !IsValueNode<Primitive>(c->input(0)) || !IsCareNode(c)) {
      continue;
    }
    (void)parameter_user_info.second.second.insert(candidate);
  }
  return parameter_user_info;
}

ParameterUsersInfo FindParameterUsers(const AnfNodePtr &node, bool (*IsCareNode)(const CNodePtr &)) {
  ParameterUsersInfo parameter_users_info;
  auto cnode_with_refkeys = CNodeWithRefKeys(node);
  if (cnode_with_refkeys.first != nullptr) {
    // the node is a ref key node
    return FindRefKeyNodeUsers(cnode_with_refkeys, IsCareNode);
  } else if (node->isa<Parameter>()) {
    // the node is a parameter node
    return FindParameterNodeUsers(node, IsCareNode);
  }
  return parameter_users_info;
}

Shape ParameterSliceShape(const std::pair<AnfNodePtr, int64_t> &param_info) {
  auto user_cnode = param_info.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(user_cnode);
  auto user_input_index = param_info.second;
  OperatorInfoPtr op_info = user_cnode->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(op_info);
  size_t input_tensor_info_size = op_info->inputs_tensor_info().size();
  if (SizeToLong(input_tensor_info_size) <= user_input_index - 1) {
    MS_LOG(EXCEPTION) << op_info->name() << ": the size of inputs tensor info is " << input_tensor_info_size
                      << ", but the index is " << user_input_index - 1;
  }
  TensorInfo tensor_info = op_info->inputs_tensor_info()[user_input_index - 1];
  MS_LOG(DEBUG) << "The op name is " << op_info->name() << ", the parameter index is " << user_input_index - 1
                << ", the slice shape is " << ShapeToString(tensor_info.slice_shape()) << ", the origin shape is "
                << ShapeToString(tensor_info.shape());
  return tensor_info.slice_shape();
}

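// A parameter shared by several operators must be sliced the same way:
// compare the slice shape each user requires and raise on any mismatch.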
void CheckParameterSplit(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    ParameterUsersInfo parameter_users_info = FindParameterUsers(node, IsParallelCareNode);
    auto users_set = parameter_users_info.second.second;
    if (users_set.size() <= 1) {
      continue;
    }
    auto parameter_name = parameter_users_info.first;
    MS_LOG(INFO) << "The parameter: " << parameter_name << " has " << users_set.size() << " users";
    auto first_user = users_set.pop();
    Shape first_user_slice_shape = ParameterSliceShape(first_user);
    for (auto &user : users_set) {
      Shape user_slice_shape = ParameterSliceShape(user);
      if (first_user_slice_shape != user_slice_shape) {
        MS_LOG(EXCEPTION) << "The parameter: " << parameter_name
                          << " has multiple users, but the split strategies are different";
      }
    }
  }
}

bool CreateGroupsByCkptFile(const std::string &file) {
  GroupInfoMap group_info_map;
  if (StrategyCheckpoint::GetInstance().LoadGroupInfo(file, &group_info_map) != SUCCESS) {
    return false;
  }
  if (CreateGroups(group_info_map) != SUCCESS) {
    return false;
  }
  MS_LOG(INFO) << "Create groups by checkpoint file success";
  return true;
}

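// A parameter counts as used if any user is a real computation; uses that only
// forward it into a called subgraph (directly or through J) are followed
// recursively.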
bool IsUsedParameter(const FuncGraphPtr &graph, const AnfNodePtr &parameter) {
  MS_EXCEPTION_IF_NULL(graph);
  MS_EXCEPTION_IF_NULL(parameter);
  auto manager = graph->manager();
  auto node_users = manager->node_users()[parameter];
  if (node_users.empty()) {
    return false;
  }
  for (auto node_user : node_users) {
    auto use_node = node_user.first->cast<CNodePtr>();
    if (IsValueNode<FuncGraph>(use_node->input(0))) {
      auto graph_sub = GetValueNode<FuncGraphPtr>(use_node->input(0));
      auto parameters = graph_sub->parameters();
      auto parameter_sub = parameters[node_user.second - 1];
      return IsUsedParameter(graph_sub, parameter_sub);
    }
    if (use_node->input(0)->isa<CNode>()) {
      auto cnode = use_node->input(0)->cast<CNodePtr>();
      if (!IsSomePrimitive(cnode, J) || !IsValueNode<FuncGraph>(cnode->input(1))) {
        return true;
      }
      auto graph_sub = GetValueNode<FuncGraphPtr>(cnode->input(1));
      auto parameters = graph_sub->parameters();
      auto parameter_sub = parameters[node_user.second - 1];
      return IsUsedParameter(graph_sub, parameter_sub);
    }
    return true;
  }
  return true;
}

static void HandleNoUsedParameter(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  bool full_batch = ParallelContext::GetInstance()->full_batch();
  if (full_batch) {
    return;
  }
  auto dev_num = g_device_manager->stage_device_num();
  auto parameters = root->parameters();
  for (auto &parameter : parameters) {
    if (IsUsedParameter(root, parameter)) {
      continue;
    }
    auto parameter_shape = GetNodeShape(parameter);
    if (parameter_shape.empty()) {
      continue;
    }
    Shape slice_shape = parameter_shape[0];
    if (slice_shape.empty()) {
      continue;
    }
    slice_shape[0] = slice_shape[0] / dev_num;
    auto slice_shape_ptr = std::make_shared<abstract::Shape>(slice_shape);
    auto abstract = parameter->abstract();
    MS_EXCEPTION_IF_NULL(abstract);
    auto abstract_cloned = abstract->Clone();
    MS_EXCEPTION_IF_NULL(abstract_cloned);
    abstract_cloned->set_shape(slice_shape_ptr);
    parameter->set_abstract(abstract_cloned);
  }
}

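// Entry of the step_parallel pass. It runs once per graph in (semi) auto
// parallel mode; the returned `changes` flag stays false, as the pass never
// requests a re-run of the optimizer.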
bool StepParallel(const FuncGraphPtr &root, const opt::OptimizerPtr &optimizer) {
#if (ENABLE_CPU && (ENABLE_D || ENABLE_GPU))
  if (ps::Util::IsRoleOfPServer() || ps::Util::IsRoleOfScheduler()) {
    return false;
  }
#endif
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(optimizer);
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  std::string parallel_mode = ParallelContext::GetInstance()->parallel_mode();
  // assume no change to the graph
  bool changes = false;
  // control whether to use the model_parallel mode
  if (!root->has_flag(AUTO_PARALLEL) || ((parallel_mode != AUTO_PARALLEL) && (parallel_mode != SEMI_AUTO_PARALLEL)) ||
      (root->has_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY))) {
    if (!root->has_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY)) {
      if (HasStrategy(root)) {
        MS_LOG(INFO) << "Strategies ignored in " << parallel_mode
                     << ", set_strategy() only valid in [semi_]auto_parallel.";
      }
      root->set_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY, true);
    }
    return changes;
  }
  struct timeval start_time, end_time;
  (void)gettimeofday(&start_time, nullptr);
  MS_LOG(INFO) << "Now entering step parallel";
  DumpGraph(root, std::string(STEP_PARALLEL_BEGIN));
  pipeline::ResourceBasePtr res = optimizer->resource();
  MS_EXCEPTION_IF_NULL(res);
  FuncGraphManagerPtr manager = res->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);
  std::reverse(all_nodes.begin(), all_nodes.end());
  if (parallel_mode != AUTO_PARALLEL) {
    TOTAL_OPS = 0;
    auto pipeline_stages = ParallelContext::GetInstance()->pipeline_stage_split_num();
    if (pipeline_stages <= 1 && ParallelInit() != SUCCESS) {
      MS_LOG(EXCEPTION) << "Parallel init failed";
    }
    // mark the forward cnodes, parallel only cares about these nodes
    MarkForwardCNode(root);
    if (FindCommunicationOp(all_nodes)) {
      MS_LOG(EXCEPTION) << "The graph contains a communication op";
    }
    // extract shape and strategy, set operator_info
    ExtractInformation(all_nodes, root->has_flag(TRAINING));
    ReshapeInit(all_nodes);
  }
  HandleRootReshapeAndSaveStrategy(all_nodes);
  HandleForwardMakeTupleAndMakeList(all_nodes);
  // if the input or parameter has multiple users, check whether its split strategies are consistent.
  CheckParameterSplit(all_nodes);
  // save strategy as checkpoint for multi-train
  if (StrategyCheckpoint::GetInstance().SaveCheckPointOn()) {
    CheckpointStrategy(all_nodes);
  }
  HandleSymbolicKeyInstance(root, all_nodes);
  // cover Parallel shape
  CoverSliceShape(root);
  // handle inputs that are not used
  HandleNoUsedParameter(root);
  // set the shape for the optimizer's clone tensor
  SetClonedTensorShapeForOptimizer(root);
  // ForwardCommunication BackwardCommunication TensorRedistribution
  ParallelCommunication(root, all_nodes, manager);
  auto group_info = g_device_manager->group_info();
  if (StrategyCheckpoint::GetInstance().group_info_save_on() &&
      StrategyCheckpoint::GetInstance().SaveGroupInfo(group_info) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Save group info failed";
  }
  DumpGraph(root, std::string(STEP_PARALLEL_END));
  // step parallel only runs once
  root->set_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY, true);
  res->results()[pipeline::kStepParallelGraph] = root;
  // in auto parallel mode, no need to check if strategies are set
  root->set_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY, true);
  (void)gettimeofday(&end_time, nullptr);
  uint64_t time = kUSecondInSecond * static_cast<uint64_t>(end_time.tv_sec - start_time.tv_sec);
  time += static_cast<uint64_t>(end_time.tv_usec - start_time.tv_usec);
  MS_LOG(INFO) << "Now leaving step parallel, used time: " << time << " us";
  return changes;
}

// Needed by rec_parser
std::vector<std::string> ExtractInputsTensorName(const CNodePtr &node) {
  std::vector<std::string> name_inputs;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  std::vector<AnfNodePtr> node_inputs{all_inputs.begin() + 1, all_inputs.end()};
  std::string node_id = node->UniqueId();
  name_inputs.push_back(node_id);
  for (auto &input : node_inputs) {
    std::string name = input->UniqueId();
    name_inputs.push_back(name);
  }
  return name_inputs;
}
}  // namespace parallel
}  // namespace mindspore