
step_parallel.cc

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "frontend/parallel/step_parallel.h"

#include <inttypes.h>
#include <sys/time.h>

#include <algorithm>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <unordered_map>
#include <utility>

#include "base/core_ops.h"
#include "frontend/operator/ops.h"
#include "frontend/optimizer/optimizer.h"
#include "frontend/parallel/auto_parallel/graph_costmodel.h"
#include "frontend/parallel/context.h"
#include "frontend/parallel/device_manager.h"
#include "frontend/parallel/dynamic_creator.h"
#include "frontend/parallel/graph_util/generate_graph.h"
#include "frontend/parallel/graph_util/graph_info.h"
#include "frontend/parallel/graph_util/node_info.h"
#include "frontend/parallel/node_check.h"
#include "frontend/parallel/ops_info/matmul_info.h"
#include "frontend/parallel/strategy_checkpoint/parallel_strategy_checkpoint.h"
#include "ir/param_info.h"
#include "ir/tensor.h"
#include "utils/comm_manager.h"
#include "utils/ms_context.h"
#include "utils/symbolic.h"
#include "mindspore/core/utils/parallel_node_check.h"
#if (ENABLE_CPU && (ENABLE_D || ENABLE_GPU))
#include "ps/util.h"
#endif

using mindspore::tensor::Tensor;

namespace mindspore {
namespace parallel {
static const std::set<std::string> COMMUNICATION_OPS = {ALL_REDUCE, ALL_GATHER, ALL_TO_ALL, REDUCE_SCATTER};
static const std::set<std::string> INVALID_LOSS_OPS = {GET_NEXT, VIRTUALLOSS, LOAD, UPDATESTATE};
// g_RefMap: if input i of CNode B is a RefKey[Parameter C], the map holds one
// entry with key C and value (B, i).
static std::map<AnfNodePtr, std::pair<AnfNodePtr, int64_t>> g_RefMap;
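
// If the primitive of a newly built op carries a 'group' attr (a hashed group
// name), resolve the corresponding rank list via the device manager and record
// it on the primitive as the GROUP_RANKS attr.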
void SetCommunicationOpGroupLabel(std::vector<AnfNodePtr> new_node_input) {
  if (new_node_input.empty()) {
    return;
  }
  ValueNodePtr prim_anf_node = new_node_input[0]->cast<ValueNodePtr>();
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
  MS_EXCEPTION_IF_NULL(prim);
  auto attrs = prim->attrs();
  auto iter = attrs.find(GROUP);
  if (iter != attrs.end()) {
    auto value = iter->second;
    MS_EXCEPTION_IF_NULL(value);
    if (value->isa<StringImm>()) {
      std::string hash_name = value->cast<StringImmPtr>()->value();
      MS_EXCEPTION_IF_NULL(g_device_manager);
      std::string rank_list_name = g_device_manager->FindRankListNameByHashName(hash_name);
      (void)prim->AddAttr(GROUP_RANKS, MakeValue(rank_list_name));
    }
  }
}
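
// Assemble the input list {op_prim, node, constant params...} for a new
// operator CNode; the operator's constant params are inserted at the input
// positions it expects.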
std::vector<AnfNodePtr> CreateInput(const Operator &op, const AnfNodePtr &node, const std::string &instance_name) {
  MS_EXCEPTION_IF_NULL(node);
  OperatorArgs arg_forward = op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_forward.first, op.first, instance_name);
  MS_EXCEPTION_IF_NULL(pyop_instance);
  OperatorParams params = arg_forward.second;
  std::vector<AnfNodePtr> new_node_input = {NewValueNode(pyop_instance), node};
  if (!params.empty()) {
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      MS_EXCEPTION_IF_NULL(val);
      int64_t position = param.second;
      (void)new_node_input.insert(new_node_input.begin() + position, val);
    }
  }
  // if the op has a 'group' attr, set the rank list name for the op
  SetCommunicationOpGroupLabel(new_node_input);
  return new_node_input;
}
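
// Create a new CNode from `op` and splice it in front of input `index` of
// `node`, so that `pre_node` now feeds the new node instead of `node` directly.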
void InsertNode(const Operator &op, const CNodePtr &node, size_t index, const AnfNodePtr &pre_node,
                const FuncGraphPtr &func_graph, const std::string &instance_name) {
  // insert new node before the node
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  std::vector<AnfNodePtr> node_input = CreateInput(op, pre_node, instance_name);
  CNodePtr new_node = func_graph->NewCNode(node_input);
  MS_EXCEPTION_IF_NULL(new_node);
  if (instance_name.find(SPLIT_SENS) == std::string::npos) {
    new_node->set_in_forward_flag(true);  // mark forward flag
  }
  auto new_node_value = node_input[0]->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(new_node_value);
  PrimitivePtr new_node_prim = new_node_value->value()->cast<PrimitivePtr>();
  new_node_prim->set_instance_name(instance_name);
  new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
  new_node->set_scope(scope);
  node_input[0]->set_scope(scope);
  manager->SetEdge(node, SizeToLong(index), new_node);
  MS_LOG(INFO) << "Insert " << instance_name << " success";
}
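
// A parameter counts as cloned if it has a default value and its param_info
// marks it as a clone.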
bool ParameterIsCloned(const AnfNodePtr &parameter_node) {
  MS_EXCEPTION_IF_NULL(parameter_node);
  auto cloned_parameter = parameter_node->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(cloned_parameter);
  // find the clone parameter
  if (!cloned_parameter->has_default()) {
    return false;
  }
  auto param_value = cloned_parameter->param_info();
  if (param_value == nullptr) {
    return false;
  }
  bool cloned = param_value->cloned();
  if (!cloned) {
    return false;
  }
  MS_LOG(INFO) << "The parameter: " << cloned_parameter->name() << " is cloned";
  return true;
}
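
// Build the input list for a mirror op. In mini-step gradient accumulation mode
// (grad_accumulation_step > 1) the mirror op additionally takes the local step
// parameter and the matching accu_grads parameter; if either is missing, fall
// back to the plain mirror operator.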
std::vector<AnfNodePtr> CreateMirrorInput(const FuncGraphPtr &root, const Operator &op, const AnfNodePtr &node,
                                          const std::string &instance_name, const std::string &weight_name) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(root->manager());
  AnfNodePtr local_step_param = nullptr;
  AnfNodePtr grad_accu = nullptr;
  std::string op_name = op.first;
  OperatorArgs arg_forward = op.second;
  int64_t grad_accumulation_step = ParallelContext::GetInstance()->grad_accumulation_step();
  if (grad_accumulation_step > 1) {
    bool find_local_step_node = false;
    auto parameters = root->parameters();
    for (auto &param : parameters) {
      auto param_ptr = param->cast<ParameterPtr>();
      MS_EXCEPTION_IF_NULL(param_ptr);
      if (param_ptr->name() == LOCAL_STEP) {
        auto param_users = root->manager()->node_users()[param];
        for (auto &user : param_users) {
          if (AnfNodeIsPrimitive(user.first, ASSIGN)) {
            find_local_step_node = true;
            local_step_param = user.first;
            MS_LOG(INFO) << "Found the local step node when creating the mirror, it may be mini-step grad "
                         << "accumulation mode";
            break;
          }
        }
        break;
      }
    }
    bool find_grad_accu_node = false;
    for (auto &param : parameters) {
      if (!ParameterIsCloned(param)) {
        continue;
      }
      auto param_ptr = param->cast<ParameterPtr>();
      MS_EXCEPTION_IF_NULL(param_ptr);
      if (param_ptr->name().find(weight_name) != std::string::npos &&
          param_ptr->name().find(ACCU_GRADS) != std::string::npos) {
        find_grad_accu_node = true;
        grad_accu = param;
        MS_LOG(INFO) << "Found the accumulation grad node: " << param_ptr->name();
        break;
      }
    }
    if (op_name == MIRROR_MINI_STEP_OPERATOR) {
      if (!find_local_step_node || !find_grad_accu_node) {
        op_name = MIRROR_OPERATOR;
        arg_forward.first.pop_back();
      }
    }
  }
  ValuePtr pyop_instance = CreatOpInstance(arg_forward.first, op_name, instance_name);
  MS_EXCEPTION_IF_NULL(pyop_instance);
  OperatorParams params = arg_forward.second;
  std::vector<AnfNodePtr> new_node_input;
  if (op_name == MIRROR_MINI_STEP_OPERATOR) {
    new_node_input = {NewValueNode(pyop_instance), node, local_step_param, grad_accu};
    MS_LOG(INFO) << "Insert the local step node and grad accumulation node as the mirror op's inputs";
  } else {
    new_node_input = {NewValueNode(pyop_instance), node};
  }
  if (!params.empty()) {
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      MS_EXCEPTION_IF_NULL(val);
      int64_t position = param.second;
      (void)new_node_input.insert(new_node_input.begin() + position, val);
    }
  }
  // if the op has a 'group' attr, set the rank list name for the op
  SetCommunicationOpGroupLabel(new_node_input);
  return new_node_input;
}

void InsertMirrorNode(const FuncGraphPtr &root, const Operator &op, const CNodePtr &node, size_t index,
                      const AnfNodePtr &pre_node, const FuncGraphPtr &func_graph, const std::string &instance_name,
                      const std::string &param_name) {
  // insert new node before the node
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  std::vector<AnfNodePtr> node_input = CreateMirrorInput(root, op, pre_node, instance_name, param_name);
  CNodePtr new_node = func_graph->NewCNode(node_input);
  MS_EXCEPTION_IF_NULL(new_node);
  if (instance_name.find(SPLIT_SENS) == std::string::npos) {
    new_node->set_in_forward_flag(true);  // mark forward flag
  }
  auto new_node_value = node_input[0]->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(new_node_value);
  PrimitivePtr new_node_prim = new_node_value->value()->cast<PrimitivePtr>();
  new_node_prim->set_instance_name(instance_name);
  new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
  new_node->set_scope(scope);
  node_input[0]->set_scope(scope);
  manager->SetEdge(node, SizeToLong(index), new_node);
  MS_LOG(INFO) << "Insert " << instance_name << " success";
}

// Replace pre_node with pre_node->op
static CNodePtr ReplaceNode(const Operator &op, const AnfNodePtr &pre_node, const FuncGraphPtr &func_graph,
                            const std::string &instance_name) {
  // insert new node before the node
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = pre_node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  std::vector<AnfNodePtr> node_input = CreateInput(op, pre_node, instance_name);
  CNodePtr new_node = func_graph->NewCNode(node_input);
  MS_EXCEPTION_IF_NULL(new_node);
  if (instance_name.find(SPLIT_SENS) == std::string::npos) {
    new_node->set_in_forward_flag(true);  // mark forward flag
  }
  auto new_node_prim = GetValueNode<PrimitivePtr>(node_input[0]);
  new_node_prim->set_instance_name(instance_name);
  new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
  new_node->set_scope(scope);
  node_input[0]->set_scope(scope);
  manager->Replace(pre_node, new_node);
  MS_LOG(INFO) << "Insert " << instance_name << " success";
  return new_node;
}

std::string CreateInstanceName(const CNodePtr &node, size_t index) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsValueNode<Primitive>(node->input(0))) {
    MS_LOG(EXCEPTION) << "CreateInstanceName: " << node->ToString() << " doesn't have primitive";
  }
  std::string name_base = node->fullname_with_scope();
  std::string name = name_base + "_" + std::to_string(index);
  std::string instance_name = HashInstanceName(name);
  return instance_name;
}
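
// Insert the forward communication ops after `node`; if the node's user is a
// TupleGetItem, insert after that instead (only a single output is supported).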
void ForwardCommunication(OperatorVector forward_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  // step 1: get the graph manager
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto uses_set = manager->node_users()[node];
  CNodePtr node_to_insert = node;
  for (auto &uses_pair : uses_set) {
    auto uses_cnode = uses_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(uses_cnode);
    if (!IsValueNode<Primitive>(uses_cnode->input(0))) {
      break;
    }
    PrimitivePtr value_node_prim = GetValueNode<PrimitivePtr>(uses_cnode->input(0));
    MS_EXCEPTION_IF_NULL(value_node_prim);
    if (value_node_prim->name() == prim::kTupleGetItem) {
      if (uses_set.size() > 1) {
        MS_LOG(EXCEPTION) << "Now only support one output, but got " << uses_set.size();
      }
      node_to_insert = uses_cnode;
    }
  }
  MS_EXCEPTION_IF_NULL(node_to_insert);
  std::reverse(forward_op.begin(), forward_op.end());
  // step 2: traverse op_list and insert node
  for (size_t index = 0; index < forward_op.size(); ++index) {
    std::string instance_name_base = FORWARD_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(node, index);
    std::vector<AnfNodePtr> forward_input = CreateInput(forward_op[index], node_to_insert, instance_name);
    CNodePtr forward_node = func_graph->NewCNode(forward_input);  // using NewCNode to create anfnode
    MS_EXCEPTION_IF_NULL(forward_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    forward_node->set_scope(scope);
    forward_node->set_in_forward_flag(true);
    forward_input[0]->set_scope(scope);
    (void)manager->Replace(node_to_insert, forward_node);  // using Replace function to insert node
  }
}
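
// Replace `prev` with a MakeTuple of `num` TupleGetItem(prev, i) nodes, so that
// a multi-output value can be consumed as a tuple again after op insertion.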
CNodePtr InsertMakeTuple(const AnfNodePtr &prev, uint64_t num, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(prev);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> make_tuple_inputs;
  make_tuple_inputs.push_back(NewValueNode(prim::kPrimMakeTuple));
  for (uint64_t i = 0; i < num; i++) {
    std::vector<AnfNodePtr> tuple_get_item_inputs{NewValueNode(prim::kPrimTupleGetItem), prev,
                                                  CreatInt64Imm(UlongToLong(i))};
    auto tuple_get_item = func_graph->NewCNode(tuple_get_item_inputs);
    MS_EXCEPTION_IF_NULL(tuple_get_item);
    make_tuple_inputs.push_back(tuple_get_item);
  }
  auto make_tuple = func_graph->NewCNode(make_tuple_inputs);
  MS_EXCEPTION_IF_NULL(make_tuple);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  (void)manager->Replace(prev, make_tuple);
  return make_tuple;
}
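
// Insert the inferred redistribution operator chain in front of input `pos` of
// `node`; each inserted op that produces multiple outputs is followed by a
// MakeTuple so the next consumer still sees a single value.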
void InsertRedistribution(const RedistributionOpListPtr &redistribution_oplist_ptr, const CNodePtr &node,
                          const FuncGraphPtr &func_graph, int64_t pos, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(pre_node);
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if ((redistribution_oplist_ptr->first).size() != (redistribution_oplist_ptr->second).size()) {
    MS_LOG(EXCEPTION) << "The sizes of OperatorVector and OutPutInfoVector must be the same!";
  }
  for (size_t index = 0; index < (redistribution_oplist_ptr->first).size(); ++index) {
    if (pos >= SizeToLong(node->inputs().size())) {
      MS_LOG(EXCEPTION) << "InsertRedistribution: pos can't be larger than the size of node's inputs";
    }
    // Create new node
    AnfNodePtr target_node = node->input(LongToSize(pos));
    MS_EXCEPTION_IF_NULL(target_node);
    // Create instance_name
    auto op = (redistribution_oplist_ptr->first)[index];
    std::string op_name = (redistribution_oplist_ptr->first)[index].first;
    std::string instance_name_base = REDISTRIBUTION_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(pre_node, index) + op_name;
    InsertNode(op, node, LongToSize(pos), target_node, func_graph, instance_name);
    if ((redistribution_oplist_ptr->second)[index].first) {
      target_node = node->input(LongToSize(pos));
      MS_EXCEPTION_IF_NULL(target_node);
      (void)InsertMakeTuple(target_node, (redistribution_oplist_ptr->second)[index].second, func_graph);
    }
  }
}

void InsertGetTensorSliceOp(const Operator &op, const CNodePtr &node, const FuncGraphPtr &func_graph, int64_t pos,
                            const std::string &instance_name) {
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: the graph is null, the instance name is " << instance_name;
  }
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if (pos >= SizeToLong(node->inputs().size())) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: pos can't be larger than the size of node's inputs, the instance "
                      << "name is " << instance_name;
  }
  // Create new node
  AnfNodePtr pre_node = node->input(LongToSize(pos));
  MS_EXCEPTION_IF_NULL(pre_node);
  InsertNode(op, node, LongToSize(pos), pre_node, func_graph, instance_name);
}

TensorLayout GetTensorInLayout(const CNodePtr &middle_node, const PrimitivePtr &middle_prim,
                               const OperatorInfoPtr &distribute_operator) {
  TensorInfo tensorinfo_in;
  if (middle_prim->name() == prim::kTupleGetItem) {
    auto value_node = middle_node->input(2)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(value_node);
    size_t index_s = LongToSize(GetValue<int64_t>(value_node->value()));
    if (index_s >= distribute_operator->outputs_tensor_info().size()) {
      MS_LOG(EXCEPTION) << "The index is out of range, index: " << index_s
                        << ", vector size: " << distribute_operator->outputs_tensor_info().size();
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[index_s];
  } else {
    if (distribute_operator->outputs_tensor_info().empty()) {
      MS_LOG(EXCEPTION) << "The outputs tensor info is empty";
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[0];
  }
  return tensorinfo_in.tensor_layout();
}

std::string GetPrimName(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsValueNode<Primitive>(node->input(0))) {
    MS_LOG(EXCEPTION) << "The node is not a primitive";
  }
  auto value_node = node->input(0)->cast<ValueNodePtr>();
  auto prim = GetValueNode<PrimitivePtr>(value_node);
  MS_EXCEPTION_IF_NULL(prim);
  return prim->name();
}

OperatorInfoPtr GetDistributeOperator(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsParallelCareNode(node)) {
    return nullptr;
  }
  OperatorInfoPtr distribute_operator = node->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Distribute operator is nullptr, the prim is " << GetPrimName(node);
  }
  return distribute_operator;
}
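
// Between `middle_node` (producer) and the consumer in `node_pair`, infer the
// tensor redistribution from the producer's output layout to the consumer's
// input layout and insert the resulting op chain in front of the consumer.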
void Redistribution(const std::pair<AnfNodePtr, int64_t> &node_pair, const OperatorInfoPtr &distribute_operator,
                    const CNodePtr &middle_node, int64_t index, TensorRedistribution tensor_redistribution,
                    const CNodePtr &pre_node) {
  FuncGraphPtr func_graph = middle_node->func_graph();
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "Redistribution: get graph failed";
  }
  CNodePtr next_node = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(next_node);
  auto middle_value = middle_node->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(middle_value);
  PrimitivePtr middle_prim = middle_value->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(middle_prim);
  OperatorInfoPtr next_distribute_operator = GetDistributeOperator(next_node);
  if (next_distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << next_node->ToString() << " GetDistributeOperator failed";
  }
  RankList dev_list = distribute_operator->stage_device_list();
  std::string next_prim_name = GetValueNode<PrimitivePtr>(next_node->input(0))->name();
  MS_LOG(DEBUG) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim " << next_prim_name;
  MS_LOG(DEBUG) << "Redistribution: middle_node " << middle_node->ToString() << " next_node " << next_node->ToString();
  // extract tensor layout in and out
  if (distribute_operator->outputs_tensor_info().empty()) {
    MS_LOG(WARNING) << "pre_node's tensorinfo_in is empty, operator name is " << distribute_operator->name();
    return;
  }
  if (LongToSize(index - 1) >= next_distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(WARNING) << "The index is out of range, the index is " << index - 1 << ", the vector size is "
                    << next_distribute_operator->inputs_tensor_info().size() << ", next operator name is "
                    << next_distribute_operator->name();
    return;
  }
  TensorInfo tensorinfo_out = next_distribute_operator->inputs_tensor_info()[LongToSize(index - 1)];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  TensorLayout tensorlayout_in = GetTensorInLayout(middle_node, middle_prim, distribute_operator);
  if (tensor_redistribution.Init(tensorlayout_in, tensorlayout_out, dev_list) == FAILED) {
    MS_LOG(ERROR) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim: " << next_prim_name;
    MS_LOG(ERROR) << "Redistribution: middle_node " << middle_node->ToString() << " next_node "
                  << next_node->ToString();
    DumpGraph(func_graph, "redistribution_error");
    MS_LOG(EXCEPTION) << "Failure: tensor_redistribution init failed";
  }
  RedistributionOpListPtr redistribution_oplist_ptr = tensor_redistribution.InferTensorRedistributionOperatorList();
  if (redistribution_oplist_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: InferTensorRedistribution failed";
  }
  MS_LOG(DEBUG) << "Redistribution size " << redistribution_oplist_ptr->first.size();
  if (!redistribution_oplist_ptr->first.empty()) {
    // insert node before next node
    InsertRedistribution(redistribution_oplist_ptr, next_node, func_graph, node_pair.second, pre_node);
  }
}

bool StrategyFound(std::unordered_map<std::string, ValuePtr> attrs) {
  auto iter = attrs.find(STRATEGY);
  return !((iter == attrs.end()) || (iter->second->type_name() == NONE));
}

bool HasStrategy(const FuncGraphPtr &root) {
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    auto attrs = prim->attrs();
    if (StrategyFound(attrs)) {
      return true;
    }
  }
  return false;
}

bool IsCommunicationOp(const PrimitivePtr &prim) {
  MS_EXCEPTION_IF_NULL(prim);
  return (COMMUNICATION_OPS.find(prim->name()) != COMMUNICATION_OPS.end());
}

bool FindCommunicationOp(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_value_node = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_value_node);
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_value_node);
    MS_EXCEPTION_IF_NULL(prim);
    if (IsCommunicationOp(prim) && cnode->in_forward_flag()) {
      MS_EXCEPTION_IF_NULL(prim_value_node->scope());
      MS_LOG(INFO) << "The graph contains a communication op: " << prim->name() << ", scope name is "
                   << prim_value_node->scope()->name();
      return true;
    }
  }
  return false;
}

bool IsParallelCareNode(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  ValueNodePtr prim_node = cnode->input(0)->cast<ValueNodePtr>();
  if (prim_node == nullptr) {
    return false;
  }
  PrimitivePtr prim = prim_node->value()->cast<PrimitivePtr>();
  if (prim == nullptr) {
    return false;
  }
  if (IsInParallelBlackList(prim)) {
    MS_LOG(DEBUG) << "Parallel doesn't care about node: " << prim->name();
    return false;
  }
  // get_next is not in the forward graph, so we need to mark get_next as a forward node
  if (prim->name() == GET_NEXT) {
    return true;
  }
  if ((prim->name() == CAST) && !cnode->has_user_data<OperatorInfo>()) {
    return false;
  }
  return cnode->in_forward_flag();
}
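
// Walk the users of `node`: parallel-care consumers get a Redistribution call,
// Depend's non-data input and UpdateState users are skipped, and all other
// users are traversed recursively.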
void StepRedistribution(const CNodePtr &node, const OperatorInfoPtr &distribute_operator, const CNodePtr &insert_node,
                        const TensorRedistribution &tensor_redistribution, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node->func_graph());
  FuncGraphManagerPtr manager = node->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  CNodePtr insert_node_new;
  if (AnfNodeIsPrimitive(node, MAKE_TUPLE) || AnfNodeIsPrimitive(node, MAKE_LIST)) {
    MS_LOG(INFO) << "No need to insert redistribution op between make_tuple node and the next node";
    return;
  }
  if (IsValueNode<Primitive>(node->input(0))) {
    auto current_value = node->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(current_value);
    PrimitivePtr current_prim = current_value->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(current_prim);
    insert_node_new = ((current_prim->name() == prim::kTupleGetItem) ? node : insert_node);
  } else {
    insert_node_new = insert_node;
  }
  MS_EXCEPTION_IF_NULL(insert_node_new);
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode);
    if (!IsValueNode<Primitive>(use_cnode->input(0))) {
      StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
    } else {
      ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
      MS_EXCEPTION_IF_NULL(prim_anf_node);
      PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
      MS_EXCEPTION_IF_NULL(node_prim);
      if ((node_prim->name() == DEPEND && node_pair.second != 1) || node_prim->name() == UPDATESTATE) {
        continue;
      }
      if (IsParallelCareNode(use_cnode) && use_cnode->has_user_data<OperatorInfo>()) {
        Redistribution(node_pair, distribute_operator, insert_node_new, node_pair.second, tensor_redistribution,
                       pre_node);
      } else {
        StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
      }
    }
  }
}
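
// Split the full tensor fed into input `index` of `next_node` into the local
// slice required by that operator's tensor layout, by inserting a
// _GetTensorSlice op (and any sub-ops the operator defines).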
void SplitTensor(const AnfNodePtr &node, const CNodePtr &next_node, int64_t index) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(next_node);
  OperatorInfoPtr op_info = next_node->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(op_info);
  // If the shape of tensor is [] or [1], no need to split it.
  Shapes shapes = GetNodeShape(node);
  if (shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Split tensor for " << op_info->name()
                      << ": GetNodeShape for tensor_node, output size is not 1";
  }
  Shape shape = shapes[0];
  std::string shape_str = ShapeToString(shape);
  if (shape.empty() || ((shape.size() == 1) && (shape[0] == 1))) {
    MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape is " << shape_str
                 << ", no need to split it.";
    return;
  }
  MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape of tensor is " << shape_str;
  // extract tensor layout
  if (LongToSize(index - 1) >= op_info->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << index - 1 << ", vector size is "
                      << op_info->inputs_tensor_info().size();
  }
  TensorInfo tensor_info = op_info->inputs_tensor_info()[LongToSize(index - 1)];
  TensorLayout tensor_layout = tensor_info.tensor_layout();
  // Use _GetTensorSlice operator to split the tensor
  FuncGraphPtr func_graph = next_node->func_graph();  // only cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(tensor_layout);
  InsertGetTensorSliceOp(op, next_node, func_graph, index, SPLIT_TENSOR);
  if (!op_info->sub_ops().empty()) {
    auto sub_ops = op_info->sub_ops();
    for (size_t i = 0; i < sub_ops.size(); i++) {
      if (!sub_ops.at(i).empty()) {
        InsertGetTensorSliceOp(sub_ops.at(i).at(0), next_node, func_graph, index, SUB);
      }
    }
  }
}

void SplitTensorList(const AnfNodePtr &node, const CNodePtr &next_node, int index) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(next_node);
  if (next_node->inputs().size() != 2 || index != 1) {
    MS_LOG(INFO) << next_node->fullname_with_scope() << " must have exactly one input, got "
                 << next_node->inputs().size() - 1 << "; the index should be 1, got " << index;
    return;
  }
  OperatorInfoPtr op_info = next_node->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(op_info);
  std::vector<ValuePtr> inputs_values;
  if (IsValueNode<ValueList>(node)) {
    inputs_values = node->cast<ValueNodePtr>()->value()->cast<ValueListPtr>()->value();
  } else {
    inputs_values = node->cast<ValueNodePtr>()->value()->cast<ValueTuplePtr>()->value();
  }
  if (inputs_values.size() != op_info->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The inputs size " << inputs_values.size() << " is not equal to the inputs shape size "
                      << op_info->inputs_tensor_info().size();
  }
  std::vector<AnfNodePtr> make_tuple_inputs = {NewValueNode(prim::kPrimMakeTuple)};
  FuncGraphPtr func_graph = next_node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  ScopePtr scope = next_node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  for (size_t i = 0; i < inputs_values.size(); ++i) {
    auto value_ptr = inputs_values[i];
    auto tensor = value_ptr->cast<tensor::TensorPtr>();
    MS_EXCEPTION_IF_NULL(tensor);
    TensorInfo tensor_info = op_info->inputs_tensor_info()[i];
    TensorLayout tensor_layout = tensor_info.tensor_layout();
    auto value_node = NewValueNode(value_ptr)->cast<AnfNodePtr>();
    Operator op = CreateGetTensorSliceOp(tensor_layout);
    std::vector<AnfNodePtr> node_input = CreateInput(op, value_node, SPLIT_TENSOR);
    CNodePtr new_node = func_graph->NewCNode(node_input);
    new_node->set_in_forward_flag(true);
    auto new_node_value = node_input[0]->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(new_node_value);
    PrimitivePtr new_node_prim = new_node_value->value()->cast<PrimitivePtr>();
    new_node_prim->set_instance_name(SPLIT_TENSOR);
    new_node_prim->set_attr("keep_value_node_input", MakeValue(true));
    new_node->set_scope(scope);
    node_input[0]->set_scope(scope);
    make_tuple_inputs.push_back(new_node);
  }
  CNodePtr make_tuple = func_graph->NewCNode(make_tuple_inputs);
  (void)manager->Replace(node, make_tuple);
}

void StepSplitTensor(const AnfNodePtr &node, const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    if (use_cnode == nullptr || !IsValueNode<Primitive>(use_cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr use_cnode_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode_prim);
    if (use_cnode_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_cnode)) {
      if (IsValueNode<ValueList>(node) || IsValueNode<ValueTuple>(node)) {
        SplitTensorList(node, use_cnode, node_pair.second);
      } else {
        SplitTensor(node, use_cnode, node_pair.second);
      }
    }
  }
}
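
// Rebuild the input list of `node` for `replace_op`: keep the data inputs the
// replacement still needs and splice in the operator's constant parameters at
// their expected positions.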
std::vector<AnfNodePtr> ReplaceOpInput(const Operator &replace_op, const std::string &instance_name,
                                       const CNodePtr &node) {
  OperatorArgs arg_replace_op = replace_op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_replace_op.first, replace_op.first, instance_name);
  if (pyop_instance == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << replace_op.first << " CreatOpInstance failed";
  }
  OperatorParams params = arg_replace_op.second;
  if (node->inputs().size() < 2) {
    // the GetNext operator does not have inputs
    if (node->inputs().size() == 1) {
      return {NewValueNode(pyop_instance)};
    }
    MS_LOG(EXCEPTION) << "Failure: " << node->ToString() << " size is smaller than 2";
  }
  std::vector<AnfNodePtr> replace_input = {NewValueNode(pyop_instance), node->input(1)};
  if (replace_op.first == EMBEDDING_LOOKUP) {
    replace_input = {NewValueNode(pyop_instance), node->input(1), node->input(2)};
  }
  if (!params.empty()) {
    Param param_first = *(params.begin());
    int64_t first_position = param_first.second;
    if (first_position == 1) {
      replace_input.pop_back();
    }
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      if (val == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: val is nullptr";
      }
      int64_t position = param.second;
      (void)replace_input.insert(replace_input.begin() + position, val);
    }
  }
  return replace_input;
}

void ReplaceOneOp(const Operator &replace_op, const CNodePtr &node) {
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  std::string instance_name = CreateInstanceName(node, 0);
  std::vector<AnfNodePtr> replace_input = ReplaceOpInput(replace_op, instance_name, node);
  if (node->inputs().size() == DROPOUT_DO_MASK_CNODE_INPUT_SIZE) {
    replace_input.push_back(node->input(3));
  }
  CNodePtr replace_node = func_graph->NewCNode(replace_input);
  MS_EXCEPTION_IF_NULL(replace_node);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  replace_node->set_scope(scope);
  replace_node->set_in_forward_flag(true);
  replace_input[0]->set_scope(scope);
  (void)manager->Replace(node, replace_node);
}
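
// Replace `node` with the chain of ops in `replace_op` (already inferred by its
// OperatorInfo); the last op in the chain inherits the node's OperatorInfo, and
// multi-output steps are wrapped with MakeTuple according to replace_op_info.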
void StepReplaceOp(OperatorVector replace_op, const CNodePtr &node) {
  // step 1: get the distribute_operator and the graph manager
  OperatorInfoPtr distribute_operator = node->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since distribute_operator is nullptr";
  }
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  // step 2: traverse op_list and insert node
  std::reverse(replace_op.begin(), replace_op.end());
  auto replace_op_info = distribute_operator->replace_op_info();
  std::reverse(replace_op_info.begin(), replace_op_info.end());
  if (!replace_op_info.empty() && replace_op_info.size() != replace_op.size()) {
    MS_LOG(EXCEPTION) << "replace_op_info is not empty but its size is not equal to replace_op's size!";
  }
  bool replace_op_info_flag = !replace_op_info.empty();
  for (size_t index = 0; index < replace_op.size(); ++index) {
    std::string instance_name = CreateInstanceName(node, index);
    std::vector<AnfNodePtr> replace_input;
    if (index != replace_op.size() - 1) {
      replace_input = CreateInput(replace_op[index], node, instance_name);
    } else {
      replace_input = ReplaceOpInput(replace_op[index], instance_name, node);
    }
    CNodePtr replace_node = func_graph->NewCNode(replace_input);
    MS_EXCEPTION_IF_NULL(replace_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    replace_node->set_scope(scope);
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(replace_node->input(0));
    if (prim->name() == EMBEDDING_LOOKUP) {
      auto attrs = prim->attrs();
      attrs[TARGET] = MakeValue(CPU);
      (void)prim->SetAttrs(attrs);
    }
    if (index == replace_op.size() - 1) {
      replace_node->set_user_data<OperatorInfo>(node->user_data<OperatorInfo>());
    }
    replace_node->set_in_forward_flag(true);
    replace_input[0]->set_scope(scope);
    if (replace_op_info_flag && replace_op_info[index].first) {
      auto new_cnode = InsertMakeTuple(replace_node, replace_op_info[index].second, func_graph);
      (void)manager->Replace(node, new_cnode);  // using Replace function to insert node
    } else {
      (void)manager->Replace(node, replace_node);  // using Replace function to insert node
    }
  }
  MS_LOG(INFO) << "Insert ReplaceOp success for " << distribute_operator->name();
}

bool IsSomePrimitive(const CNodePtr &cnode, const std::string &name) {
  ValueNodePtr anf_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(anf_node);
  PrimitivePtr prim = anf_node->value()->cast<PrimitivePtr>();
  return (prim->name() == name);
}

void StepReplaceGraph(const ReplaceGraphPtr &replace_graph, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(replace_graph);
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(replace_graph->second);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  // Solve the input order.
  // For example, input_node: {segment_sum: 1, segment_sum: 2, gather: 2}.
  // The original code here bound all operations to the first input of these operators.
  // However, the segment_sum operation needs two inputs. To solve this,
  // we maintain a dict that counts the occurrences of the same operation,
  // and bind the inputs according to how many times the op has appeared.
  static std::unordered_map<AnfNodePtr, int> input_map = {};
  static int appear_count = 0;
  for (auto &replace_input : replace_graph->first) {
    auto pre_node = node->input(LongToSize(replace_input.second));
    auto it = input_map.find(replace_input.first);
    if (it != input_map.end()) {
      appear_count = 1 + it->second;
    } else {
      appear_count = 1;
    }
    input_map[replace_input.first] = appear_count;
    manager->SetEdge(replace_input.first, appear_count, pre_node);
  }
  // "(void)manager->Replace(replace_graph->first, pre_node);" can not be called
  auto replace_output = replace_graph->second;
  MS_EXCEPTION_IF_NULL(replace_output);
  (void)manager->Replace(node, replace_output);
}

int64_t GetTupleGetItemIndex(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  if (cnode->inputs().size() != 3) {
    MS_LOG(EXCEPTION) << cnode->ToString() << " size( " << cnode->inputs().size() << " ) is not 3";
  }
  if (!cnode->input(2)->isa<ValueNode>()) {
    MS_LOG(EXCEPTION) << "The index of tuple getitem is not a value node";
  }
  ValuePtr tuple_index_value = GetValueNode(cnode->input(2));
  MS_EXCEPTION_IF_NULL(tuple_index_value);
  if (!tuple_index_value->isa<Int64Imm>()) {
    MS_LOG(EXCEPTION) << "The index of tuple getitem is not int64";
  }
  return tuple_index_value->cast<Int64ImmPtr>()->value();
}

void InsertVirtualDivOp(const VirtualDivOp &virtual_div_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  for (size_t index = 1; index < node_size; ++index) {
    AnfNodePtr input = node->input(index);
    MS_EXCEPTION_IF_NULL(input);
    // if it is not a tensor, continue
    if ((!input->isa<CNode>() && !input->isa<Parameter>()) || HasAbstractMonad(input)) {
      MS_LOG(INFO) << "insert div op: the index " << index << " is not tensor, skip";
      continue;
    }
    for (size_t pos = 0; pos < virtual_div_op.size(); ++pos) {
      std::string instance_name = CreateInstanceName(node, pos);
      InsertNode(virtual_div_op[pos], node, index, node->input(index), func_graph, instance_name);
    }
    MS_LOG(INFO) << "insert div op for input index " << index << " of node";
  }
}
// Only used for InsertMirrorOps
std::pair<AnfNodePtr, bool> FindParameter(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  if (!node->isa<Parameter>() && !node->isa<CNode>() && !node->isa<ValueNode>()) {
    return std::make_pair(nullptr, false);
  } else if (node->isa<Parameter>()) {
    auto param_ptr = node->user_data<parallel::TensorLayout>();
    if (param_ptr != nullptr && !param_ptr->opt_shard_group().empty()) {
      return std::make_pair(nullptr, false);
    } else {
      return std::make_pair(node, false);
    }
  } else if (node->isa<ValueNode>()) {
    if (IsValueNode<RefKey>(node)) {
      std::vector<AnfNodePtr> param_v = FindParameterByRefKeyNode(node, func_graph);
      if (param_v.size() != 1) {
        MS_LOG(EXCEPTION) << "FindParameterByRefKeyNode failed, return vector size must be 1, real is "
                          << param_v.size();
      }
      auto param_ptr = param_v[0]->user_data<parallel::TensorLayout>();
      if (param_ptr != nullptr && !param_ptr->opt_shard_group().empty()) {
        return std::make_pair(nullptr, true);
      } else {
        return std::make_pair(node, true);
      }
    }
    return std::make_pair(nullptr, false);
  } else {
    CNodePtr cnode = node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      for (size_t index = 0; index < cnode->inputs().size(); ++index) {
        if (!FindParameter(cnode->input(index), func_graph).first) {
          continue;
        }
        return FindParameter(cnode->input(index), func_graph);
      }
    } else {
      if (IsSomePrimitive(cnode, RECEIVE) && !cnode->has_user_data<OperatorInfo>()) {
        return std::make_pair(node, false);
      }
      if (IsParallelCareNode(cnode)) {
        return std::make_pair(nullptr, false);
      } else {
        ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
        MS_EXCEPTION_IF_NULL(prim_anf_node);
        for (size_t index = 0; index < cnode->inputs().size(); ++index) {
          PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
          MS_EXCEPTION_IF_NULL(prim);
          if ((prim->name() == DEPEND || prim->name() == LOAD) && index != 1) {
            continue;
          }
          if (!FindParameter(cnode->input(index), func_graph).first) {
            continue;
          }
          return FindParameter(cnode->input(index), func_graph);
        }
      }
    }
  }
  return std::make_pair(nullptr, false);
}
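// Searches the users of anode for a cnode whose primitive name equals `name` and which
// takes anode as its first input; returns (found_in_same_graph, matching cnode).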
std::pair<bool, CNodePtr> FindCNode(const AnfNodePtr &anode, const std::string &name, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(anode);
  MS_EXCEPTION_IF_NULL(anode->func_graph());
  FuncGraphManagerPtr manager = anode->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[anode];
  bool result = false;
  CNodePtr cnode_return = nullptr;
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if (node_prim->name() == name && node_pair.second == 1) {
      if (use_apply->func_graph() == func_graph) {
        result = true;
        cnode_return = use_apply;
        MS_LOG(INFO) << "Find Primitive " << name << " in the same func_graph";
        continue;
      }
      MS_LOG(INFO) << "Find Primitive " << name << " in a different func_graph";
    }
  }
  return std::make_pair(result, cnode_return);
}
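// Decides whether the mirror should be inserted before a Cast: with gradient_fp32_sync
// enabled and a non-float32 Cast in front of the node, placing the mirror before the
// cast keeps the gradient all-reduce on the float32 side (hence the flag's name).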
bool IsCastBeforMirror(const CNodePtr &node, size_t index) {
  // return true only when gradient_fp32_sync is enabled, the previous node is Cast, and the cast type is not float32
  if (!ParallelContext::GetInstance()->gradient_fp32_sync()) {
    return false;
  }
  auto pre_node = node->input(index);
  MS_EXCEPTION_IF_NULL(pre_node);
  auto cnode = pre_node->cast<CNodePtr>();
  if (cnode == nullptr || !IsValueNode<Primitive>(cnode->input(0))) {
    return false;
  }
  auto pre_value_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(pre_value_node);
  auto pre_prim = pre_value_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(pre_prim);
  if (pre_prim->name() != CAST) {
    return false;
  }
  auto node_type = pre_node->Type();
  MS_EXCEPTION_IF_NULL(node_type);
  if (!node_type->isa<mindspore::TensorType>()) {
    MS_LOG(EXCEPTION) << "Unknown type.";
  }
  auto input_element_type = node_type->cast<mindspore::TensorTypePtr>()->element();
  MS_EXCEPTION_IF_NULL(input_element_type);
  auto type_id = input_element_type->type_id();
  return (type_id != kNumberTypeFloat32);
}
static void AddCommOpFusionType(const CNodePtr &comm_node, const AnfNodePtr &param_node) {
  MS_EXCEPTION_IF_NULL(comm_node);
  MS_EXCEPTION_IF_NULL(param_node);
  if (IsPrimitiveCNode(param_node, prim::kPrimReceive)) {
    MS_LOG(WARNING) << "The mirror of Receive does not support fusion type now.";
    return;
  }
  auto param = param_node->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(param);
  auto prim = GetValueNode<PrimitivePtr>(comm_node->input(0));
  MS_EXCEPTION_IF_NULL(prim);
  auto attrs = prim->attrs();
  auto param_info = param->param_info();
  if (!param_info) {
    MS_LOG(WARNING) << param->ToString() << " does not have parameter info.";
    return;
  }
  int32_t fusion_type = param_info->comm_fusion();
  attrs[FUSION] = MakeValue<int64_t>(fusion_type);
  prim->SetAttrs(attrs);
  MS_LOG(INFO) << "Set comm fusion: " << param->param_info()->name() << "'s fusion type is " << fusion_type;
}
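// Inserts mirror operators for every input of the node that traces back to a parameter.
// A rough picture of the rewrite (illustrative):
//   param -> op          ==>  param -> Mirror -> op
//   param -> Cast -> op  ==>  param -> Mirror -> Cast -> op   (see IsCastBeforMirror)
// An existing MirrorOp on the same parameter in the same graph is reused instead of
// inserting a duplicate.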
void InsertMirrorOps(const FuncGraphPtr &root, const MirrorOps &mirror_ops, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  for (auto input : node->inputs()) {
    if (input->isa<CNode>() && HasAbstractMonad(input)) {
      node_size--;
    }
  }
  if ((node->inputs().size() == 2) && (IsValueNode<ValueSequeue>(node->input(1)))) {
    MS_LOG(INFO) << "The input is a value sequence, skip it.";
    return;
  }
  if ((node->inputs().size() == 2) &&
      (AnfNodeIsPrimitive(node->input(1), MAKE_TUPLE) || AnfNodeIsPrimitive(node->input(1), MAKE_LIST))) {
    MS_LOG(INFO) << "The mirror for " << GetPrimName(node) << " has been handled by the make_tuple node";
    return;
  }
  if (mirror_ops.size() != node_size - 1) {
    MS_LOG(EXCEPTION) << "The size of mirror ops is wrong! mirror_ops size is " << mirror_ops.size()
                      << ", node_size is " << node_size - 1;
  }
  for (size_t index = 1; index < node_size; ++index) {
    OperatorVector backward_op = mirror_ops[index - 1];
    if (backward_op.empty()) {
      continue;
    }
    std::pair<AnfNodePtr, bool> param_node_pair = FindParameter(node->input(index), func_graph);
    if (!param_node_pair.first) {
      continue;
    }
    auto param_ptr = param_node_pair.first->cast<ParameterPtr>();
    std::string param_name;
    if (param_ptr != nullptr) {
      param_name = param_ptr->name();
    }
    // not a RefKey
    if (!param_node_pair.second) {
      auto next_cnode = FindCNode(param_node_pair.first, MIRROR_OPERATOR, func_graph);
      // if there is already a MirrorOp in the same graph, use the MirrorOp CNode as an input instead
      if (next_cnode.first) {
        MS_EXCEPTION_IF_NULL(next_cnode.second);
        // param->cast->op, insert mirror before cast
        if (node->input(index)->isa<CNode>()) {
          auto pre_cnode = node->input(index)->cast<CNodePtr>();
          auto pre_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
          if (pre_prim->name() == CAST) {
            manager->SetEdge(pre_cnode, 1, next_cnode.second);
            continue;
          }
        }
        manager->SetEdge(node, SizeToLong(index), next_cnode.second);
        continue;
      }
    }
    // if the parameter found is a RefKey, or no MirrorOp is found in the same graph, insert a new MirrorOp
    // only one MirrorOp in backward_op
    if (backward_op.size() != 1) {
      MS_LOG(EXCEPTION) << "backward_op size must be 1, real is " << backward_op.size();
    }
    std::string instance_name = MIRROR_OP;
    if (IsCastBeforMirror(node, index)) {
      for (auto &op : backward_op) {
        // insert a new node before the node
        CNodePtr cnode = node->input(index)->cast<CNodePtr>();
        MS_EXCEPTION_IF_NULL(cnode);
        AnfNodePtr pre_node = cnode->input(1);
        InsertMirrorNode(root, op, cnode, size_t(1), pre_node, func_graph, instance_name, param_name);
        auto comm_op = cnode->input(size_t(1))->cast<CNodePtr>();
        // add fusion flag
        // pipeline mirror would not be set, which should be supported later
        AddCommOpFusionType(comm_op, param_node_pair.first);
      }
    } else {
      for (auto &op : backward_op) {
        AnfNodePtr pre_node = node->input(index);
        InsertMirrorNode(root, op, node, index, pre_node, func_graph, instance_name, param_name);
        auto comm_op = node->input(index)->cast<CNodePtr>();
        // add fusion flag
        // pipeline mirror would not be set, which should be supported later
        AddCommOpFusionType(comm_op, param_node_pair.first);
      }
    }
  }
}
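// Inserts the backward communication for one cnode: mirror ops for its inputs and,
// when the node is a loss node recorded in sens_loss_pairs, a virtual div op.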
void BackwardCommunication(const FuncGraphPtr &root, const OperatorInfoPtr &distribute_operator, const CNodePtr &node,
                           const std::vector<std::pair<CNodePtr, LossNodeInfo>> &sens_loss_pairs) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(node);
  bool is_loss_cnode =
    std::any_of(sens_loss_pairs.begin(), sens_loss_pairs.end(),
                [node](const std::pair<CNodePtr, LossNodeInfo> &element) { return element.second.loss_node == node; });
  MirrorOps mirror_ops = distribute_operator->mirror_ops();
  VirtualDivOp virtual_div_op = distribute_operator->virtual_div_op();
  // insert mirror op
  if (!mirror_ops.empty()) {
    MS_LOG(INFO) << "insert mirror op for " << distribute_operator->name();
    InsertMirrorOps(root, mirror_ops, node);
  }
  // insert virtual div op
  if (!virtual_div_op.empty() && is_loss_cnode) {
    MS_LOG(INFO) << "insert virtual div op for " << distribute_operator->name();
    InsertVirtualDivOp(virtual_div_op, node);
  }
}
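// Maps a primitive name to its OperatorInfo class name by dropping a leading
// underscore and appending "Info", e.g. (illustratively) "MatMul" -> "MatMulInfo"
// and "_VirtualDataset" -> "VirtualDatasetInfo".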
std::string GetDisOpName(const std::string &prim_name) {
  std::string op_name = prim_name;
  if (!prim_name.empty() && (prim_name[0] == '_')) {
    op_name = prim_name.substr(1);
  }
  return op_name + "Info";
}
OperatorInfoPtr OperatorInstanceByName(const std::string &name, const PrimitiveAttrs &attrs,
                                       const std::vector<Shapes> &shape_list) {
  if (shape_list.size() != 2) {
    MS_LOG(ERROR) << "The size of the shape list is not 2";
    return nullptr;
  }
  if (name.length() == 0) {
    MS_LOG(EXCEPTION) << "The length of the name is zero!";
  }
  std::string distribute_opname = GetDisOpName(name);
  if (name == GATHERV2) {
    distribute_opname = name + "PInfo";
    auto data_parallel_iter = attrs.find(DATA_PARALLEL);
    if (data_parallel_iter != attrs.end()) {
      MS_EXCEPTION_IF_NULL(data_parallel_iter->second);
      if (!data_parallel_iter->second->isa<BoolImm>()) {
        MS_LOG(EXCEPTION) << "The data_parallel flag's type is not bool.";
      }
      bool data_parallel = data_parallel_iter->second->cast<BoolImmPtr>()->value();
      if (data_parallel) {
        distribute_opname = name + "Info";
      }
    }
  }
  OperatorInfoPtr operator_ =
    (OperatorInfoPtr)DynCreator::Instance().Create(distribute_opname, shape_list[0], shape_list[1], attrs, TOTAL_OPS);
  if (operator_ == nullptr) {
    MS_LOG(INFO) << "Create " << name << " failed";
    return nullptr;
  }
  std::string origin_name = operator_->name();
  operator_->set_name(origin_name + std::to_string(TOTAL_OPS));
  MS_LOG(INFO) << "Successfully created operator " << origin_name;
  ++TOTAL_OPS;
  return operator_;
}
OperatorInfoPtr OperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
                                 const std::vector<Shapes> &shape_list) {
  MS_EXCEPTION_IF_NULL(prim);
  OperatorInfoPtr operator_ = OperatorInstanceByName(prim->name(), attrs, shape_list);
  if (operator_ == nullptr) {
    if (IsInBatchParallelBlackList(prim)) {
      MS_LOG(EXCEPTION) << "Operator " << prim->name() << " is not supported yet in auto parallel mode.";
    }
    MS_LOG(INFO) << "Create " << prim->name() << " failed, use batch parallel";
    operator_ = OperatorInstanceByName(BATCH_PARALLEL, attrs, shape_list);
    MS_EXCEPTION_IF_NULL(operator_);
  }
  return operator_;
}
OperatorInfoPtr NewOperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
                                    std::vector<Shapes> shape_list) {
  OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
  for (size_t i = 0; i < shape_list[0].size(); ++i) {
    MS_LOG(INFO) << "The shape of input " << i << " is " << ShapeToString(shape_list[0][i]);
  }
  return operator_;
}
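// Parses the user-specified STRATEGY attribute into a StrategyPtr. The attribute is a
// tuple of per-input partition tuples; e.g. a user might (illustratively) shard a
// MatMul across 8 devices with strategy=((2, 4), (4, 1)).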
StrategyPtr ExtractStrategy(std::unordered_map<std::string, ValuePtr> attrs) {
  ValueTuplePtr var = attrs[STRATEGY]->cast<ValueTuplePtr>();
  StrategyPtr strategyPtr;
  int64_t stage_id = g_device_manager->stage_id();
  MS_LOG(INFO) << "Extract information: strategy " << attrs[STRATEGY]->ToString();
  if (var == nullptr) {
    MS_LOG(EXCEPTION) << "Strategy value is nullptr";
  }
  if (var->size() > 0) {
    std::vector<ValuePtr> elements = var->value();
    Strategys strategy;
    for (uint64_t index = 0; index < elements.size(); ++index) {
      Dimensions dim;
      if (elements[index]->isa<ValueSequeue>()) {
        ValueTuplePtr value_tuple = elements[index]->cast<ValueTuplePtr>();
        std::vector<ValuePtr> value_vector = value_tuple->value();
        (void)std::transform(value_vector.begin(), value_vector.end(), std::back_inserter(dim),
                             [](const ValuePtr &value) { return static_cast<int64_t>(GetValue<int64_t>(value)); });
        strategy.push_back(dim);
      } else {
        MS_LOG(EXCEPTION) << "Failure: Strategy's format is wrong! Need ValueSequence";
      }
    }
    if (strategy.empty()) {
      MS_LOG(EXCEPTION) << "ExtractStrategy: failed to extract strategy";
    }
    strategyPtr = NewStrategy(stage_id, strategy);
  }
  return strategyPtr;
}
Shapes GetValueListShape(const AnfNodePtr &node) {
  Shapes shapes;
  std::vector<ValuePtr> inputs_seq;
  if (IsValueNode<ValueList>(node)) {
    inputs_seq = node->cast<ValueNodePtr>()->value()->cast<ValueListPtr>()->value();
  } else if (IsValueNode<ValueTuple>(node)) {
    inputs_seq = node->cast<ValueNodePtr>()->value()->cast<ValueTuplePtr>()->value();
  } else {
    MS_LOG(EXCEPTION) << "The node is neither ValueList nor ValueTuple";
  }
  for (auto &ele : inputs_seq) {
    auto tensor = ele->cast<tensor::TensorPtr>();
    MS_EXCEPTION_IF_NULL(tensor);
    auto one_shape = tensor->shape();
    shapes.push_back(one_shape);
  }
  return shapes;
}
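// Returns the output shape(s) of a node: value lists/tuples are unpacked element by
// element, tuple-shaped outputs yield one shape per element, and MakeRef nodes are
// resolved through the referenced parameter.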
Shapes GetNodeShape(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  Shapes shapes;
  if (IsValueNode<ValueList>(node) || IsValueNode<ValueTuple>(node)) {
    return GetValueListShape(node);
  }
  BaseShapePtr base_shape_ptr = node->Shape();
  if (node->isa<CNode>()) {
    auto cnode = node->cast<CNodePtr>();
    if (IsValueNode<Primitive>(cnode->input(0))) {
      PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
      MS_EXCEPTION_IF_NULL(prim);
      if (prim->name() == MAKEREF) {
        AnfNodePtr ref_node = cnode->input(1);
        auto func_graph = cnode->func_graph();
        MS_EXCEPTION_IF_NULL(ref_node);
        MS_EXCEPTION_IF_NULL(func_graph);
        return GetRefKeyNodeShape(ref_node, func_graph);
      }
    }
    if (cnode->input(0)->isa<CNode>()) {
      if (cnode->inputs().size() < 2) {
        MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " size is smaller than 2";
      }
      base_shape_ptr = cnode->input(1)->Shape();
    }
  }
  if (base_shape_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " shape_ptr is nullptr, full name is "
                      << node->fullname_with_scope();
  }
  auto tuple_shape_ptr = dyn_cast<abstract::SequeueShape>(base_shape_ptr);
  if (tuple_shape_ptr != nullptr) {
    auto tuple_shape = tuple_shape_ptr->shape();
    for (auto &shape : tuple_shape) {
      auto each_shape = dyn_cast<abstract::Shape>(shape);
      MS_EXCEPTION_IF_NULL(each_shape);
      shapes.push_back(each_shape->shape());
    }
  } else {
    auto shape_ptr = dyn_cast<abstract::Shape>(base_shape_ptr);
    MS_EXCEPTION_IF_NULL(shape_ptr);
    shapes.push_back(shape_ptr->shape());
  }
  return shapes;
}
Shapes GetRefKeyNodeShape(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(node, func_graph);
  if (parameters.size() != 1) {
    MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
  }
  Shapes input_shapes;
  input_shapes = GetNodeShape(parameters[0]);
  if (input_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Get input shape failed";
  }
  MS_LOG(INFO) << "The parameter shape is " << ShapeToString(input_shapes[0]);
  return input_shapes;
}
std::vector<Shapes> ExtractShape(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  Shapes shape_inputs, shape_outputs;
  std::vector<Shapes> shape_all;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  std::vector<AnfNodePtr> node_inputs{all_inputs.begin() + 1, all_inputs.end()};
  size_t inputs_size = all_inputs.size();
  for (size_t i = 1; i < inputs_size; ++i) {
    Shapes input_shapes;
    AnfNodePtr input = all_inputs[i];
    if (IsValueNode<RefKey>(input)) {
      auto func_graph = node->func_graph();
      MS_EXCEPTION_IF_NULL(func_graph);
      std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(input, func_graph);
      if (parameters.size() != 1) {
        MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
      }
      std::pair<AnfNodePtr, int64_t> node_pair = std::make_pair(node, SizeToLong(i));
      g_RefMap[parameters[0]] = node_pair;
      input_shapes = GetRefKeyNodeShape(input, func_graph);
    } else if ((input->isa<CNode>() && !HasAbstractMonad(input)) || IsValueNode<Tensor>(input) ||
               input->isa<Parameter>() ||
               ((IsValueNode<ValueList>(input) || IsValueNode<ValueTuple>(input)) && (inputs_size == 2))) {
      input_shapes = GetNodeShape(input);
    } else {
      continue;
    }
    if (input_shapes.size() != 1) {
      if (inputs_size == 2) {  // like concat
        shape_inputs = input_shapes;
        break;
      } else {
        MS_LOG(EXCEPTION) << "ExtractShape: Get input shape failed";
      }
    }
    shape_inputs.push_back(input_shapes[0]);
  }
  shape_all.push_back(shape_inputs);
  // extract output shape
  shape_outputs = GetNodeShape(node);
  shape_all.push_back(shape_outputs);
  return shape_all;
}
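// Follows the users of a node (up to RECURSION_LIMIT levels) and returns the first
// user that is parallel-care and already carries an OperatorInfo, together with the
// input index at which the node reaches that user.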
std::pair<AnfNodePtr, int64_t> FindParallelCareNode(const AnfNodePtr &node, int32_t recursion_num) {
  if (recursion_num >= RECURSION_LIMIT) {
    return std::make_pair(nullptr, 0);
  }
  MS_EXCEPTION_IF_NULL(node);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_node_anf = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_node_anf);
    PrimitivePtr node_prim = prim_node_anf->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if ((node_prim->name() == DEPEND && node_pair.second != 1) || IsPrimitiveCNode(cnode, prim::kPrimReceive)) {
      continue;
    }
    if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
      return node_pair;
    } else {
      auto tmp_pair = FindParallelCareNode(node_pair.first, recursion_num + 1);
      if (tmp_pair.first != nullptr) {
        return tmp_pair;
      }
    }
  }
  return std::make_pair(nullptr, 0);
}
std::pair<AnfNodePtr, int64_t> FindSubGraph(const FuncGraphPtr &graph, const AnfNodePtr &parameter) {
  MS_EXCEPTION_IF_NULL(graph);
  MS_EXCEPTION_IF_NULL(parameter);
  FuncGraphManagerPtr manager = graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  std::pair<AnfNodePtr, int64_t> prim_anf_node_pair = FindParallelCareNode(parameter, 0);
  if (prim_anf_node_pair.first != nullptr) {
    return prim_anf_node_pair;
  } else {
    AnfNodeIndexSet param_sub_set = manager->node_users()[parameter];
    for (auto &param_pair : param_sub_set) {
      CNodePtr param_cnode = param_pair.first->cast<CNodePtr>();
      AnfNodePtr graph_value_node;
      if (param_cnode->input(0)->isa<CNode>()) {
        graph_value_node = param_cnode->input(0)->cast<CNodePtr>()->input(1);
      } else {
        graph_value_node = param_cnode->input(0);
      }
      if (!IsValueNode<FuncGraph>(graph_value_node)) {
        continue;
      }
      FuncGraphPtr graph_sub = GetValueNode<FuncGraphPtr>(graph_value_node);
      auto parameters = graph_sub->parameters();
      if (LongToSize(param_pair.second - 1) >= parameters.size()) {
        MS_LOG(EXCEPTION) << "The index is out of range, index is " << param_pair.second - 1 << ", vector size is "
                          << parameters.size();
      }
      std::pair<AnfNodePtr, int64_t> res = FindSubGraph(graph_sub, parameters[LongToSize(param_pair.second - 1)]);
      if (res.first != nullptr) {
        return res;
      }
    }
  }
  return std::make_pair(nullptr, 0);
}
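// When the optimizer shards a parameter, the forward pass must restore the full
// tensor, so an AllGather over the opt-shard group is inserted between the parameter
// and its user (a Cast user is handled via ReplaceNode instead). The gradients_mean
// flag is copied onto the AllGather as "mean_flag".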
static void InsertAllGatherOp(const std::string &group, const std::pair<AnfNodePtr, int> &res,
                              const AnfNodePtr &parameter) {
  Operator op = CreateAllGatherOp(group);
  MS_EXCEPTION_IF_NULL(res.first);
  MS_EXCEPTION_IF_NULL(parameter);
  auto cnode = res.first->cast<CNodePtr>();
  auto graph = cnode->func_graph();
  MS_EXCEPTION_IF_NULL(graph);
  auto cnode_prim = GetValueNode<PrimitivePtr>(cnode->input(0));
  MS_EXCEPTION_IF_NULL(cnode_prim);
  CNodePtr allgather;
  if (cnode_prim->name() == CAST) {
    allgather = ReplaceNode(op, cnode, graph, PARALLEL_OPTIMIZER_ALLGATHER);
  } else {
    InsertNode(op, cnode, res.second, parameter, graph, PARALLEL_OPTIMIZER_ALLGATHER);
    allgather = cnode->input(res.second)->cast<CNodePtr>();
  }
  MS_EXCEPTION_IF_NULL(allgather);
  // add fusion flag
  AddCommOpFusionType(allgather, parameter);
  // add gradients mean
  auto prim = GetValueNode<PrimitivePtr>(allgather->input(0));
  auto attrs = prim->attrs();
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  bool mean_flag = ParallelContext::GetInstance()->gradients_mean();
  attrs["mean_flag"] = MakeValue<bool>(mean_flag);
  prim->SetAttrs(attrs);
}
static void ApplyParallelOptOnParam(const FuncGraphPtr &root, const AnfNodePtr &parameter,
                                    const std::string &opt_shard_group) {
  if (opt_shard_group.empty()) {
    return;
  }
  FuncGraphManagerPtr manager = root->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto param_sub_set = manager->node_users()[parameter];
  for (auto &param_pair : param_sub_set) {
    auto cnode = param_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (cnode->in_forward_flag()) {
      OperatorInfoPtr distribute_operator = cnode->user_data<OperatorInfo>();
      if (distribute_operator == nullptr) {
        MS_LOG(WARNING) << "Parallel optimizer: " << cnode->ToString() << "'s OperatorInfoPtr is nullptr";
      } else if (IntToSize(param_pair.second - 1) >= distribute_operator->inputs_tensor_info().size()) {
        MS_LOG(EXCEPTION) << "The index is out of range, index is " << param_pair.second - 1 << ", vector size is "
                          << distribute_operator->inputs_tensor_info().size();
      }
      // insert allgather operator between the shard parameter and the cnode
      InsertAllGatherOp(opt_shard_group, param_pair, parameter);
      MS_LOG(INFO) << "Parallel optimizer is applied between " << parameter->ToString() << " and " << cnode->ToString();
    }
  }
}
// When this function returns a non-empty string, parallel optimizer is applied on this parameter.
std::string SetParallelShape(const AnfNodePtr &parameter, const std::pair<AnfNodePtr, int64_t> &res) {
  MS_EXCEPTION_IF_NULL(parameter);
  AbstractBasePtr abstract = parameter->abstract();
  MS_EXCEPTION_IF_NULL(abstract);
  MS_LOG(DEBUG) << "SetParallelShape " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
  CNodePtr cnode = res.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = cnode->user_data<OperatorInfo>();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: node " << cnode->ToString() << "'s OperatorInfoPtr is nullptr";
  }
  if (LongToSize(res.second - 1) >= distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << res.second - 1 << ", vector size is "
                      << distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[LongToSize(res.second - 1)];
  TensorLayout tensor_layout = tensorinfo_in.tensor_layout();
  Shape slice_shape = tensor_layout.slice_shape().array();
  std::string opt_shard_group;
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  bool enable_parallel_optimizer = ParallelContext::GetInstance()->enable_parallel_optimizer();
  if (enable_parallel_optimizer) {
    if (!ParameterRequireGrad(parameter)) {
      // only trainable parameters need parallel optimizer
      MS_LOG(INFO) << "Parallel optimizer: " << parameter->ToString() << " is not a trainable parameter.";
    } else if (parameter->cast<ParameterPtr>()->param_info() &&
               !parameter->cast<ParameterPtr>()->param_info()->parallel_optimizer()) {
      MS_LOG(INFO) << "Parallel optimizer: " << parameter->ToString() << " does not need weight shard.";
    } else if (tensor_layout.GenerateOptShardSliceShape() == Status::SUCCESS) {
      // get a totally sharded tensor slice shape if the weight is repeated on devices
      // and the shape of the first dimension can be divided;
      // apply parallel optimizer on the parameter and
      // create a communication group for the allgather operator
      slice_shape = tensor_layout.opt_shard_slice_shape();
      std::vector<Group> dev_group;
      if (distribute_operator->CreateGroupByTensorMap(tensor_layout.origin_tensor_map().array(), &dev_group) ==
            Status::SUCCESS &&
          !dev_group.empty()) {
        opt_shard_group = dev_group[0].name();
        // set the communication group in the tensor layout for checkpoint saving
        tensor_layout.set_opt_shard_group(opt_shard_group);
        MS_LOG(INFO) << "Parallel optimizer: create group " << opt_shard_group << " for " << parameter->ToString()
                     << " success.";
      } else {
        MS_LOG(WARNING) << "Parallel optimizer: create group for " << parameter->ToString() << " failed.";
      }
    } else {
      MS_LOG(INFO) << "Parallel optimizer: " << parameter->ToString() << "'s shape does not satisfy the conditions.";
    }
  }
  MS_LOG(INFO) << "SetParallelShape slice_shape " << parameter->ToString() << " shape "
               << MakeValue(slice_shape)->ToString() << ", op name is " << distribute_operator->name();
  std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
  MS_EXCEPTION_IF_NULL(parallel_shape);
  // Don't modify it in-place, as the pointer of this AbstractValue may be used as a cache key in StaticAnalysis.
  auto cloned_abstract = abstract->Clone();
  MS_EXCEPTION_IF_NULL(cloned_abstract);
  cloned_abstract->set_shape(parallel_shape);
  parameter->set_abstract(cloned_abstract);
  ParameterPtr parameter_ptr = parameter->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(parameter_ptr);
  parameter_ptr->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(tensor_layout));
  return opt_shard_group;
}
void CoverSliceShape(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto parameters = root->parameters();
  for (auto &parameter : parameters) {
    MS_EXCEPTION_IF_NULL(parameter->Shape());
    auto iter = g_RefMap.find(parameter);
    if (iter != g_RefMap.end()) {
      std::string group = SetParallelShape(parameter, g_RefMap[parameter]);
      // find all forward nodes that use the parameter in graphs and insert allgather if the group is not empty
      ApplyParallelOptOnParam(root, parameter, group);
      continue;
    }
    std::pair<AnfNodePtr, int64_t> res = FindSubGraph(root, parameter);
    if (res.first == nullptr) {
      MS_LOG(INFO) << "Parameter " << parameter->ToString() << " does not need to set parallel shape";
    } else {
      std::string group = SetParallelShape(parameter, res);
      // find all forward nodes that use the parameter in graphs and insert allgather if the group is not empty
      ApplyParallelOptOnParam(root, parameter, group);
      MS_LOG(DEBUG) << "Parameter " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
    }
  }
  g_RefMap.clear();
}
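// Optimizer states (e.g. momentum accumulators) are cloned parameters; each one must
// adopt the slice shape and tensor layout of its source parameter, matched through the
// cloned_index / be_cloned_index pair recorded in the param info.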
void SetClonedTensorShapeForOptimizer(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &cloned_parameter_node : root->parameters()) {
    MS_EXCEPTION_IF_NULL(cloned_parameter_node);
    auto cloned_parameter = cloned_parameter_node->cast<ParameterPtr>();
    MS_EXCEPTION_IF_NULL(cloned_parameter);
    if (!ParameterIsCloned(cloned_parameter_node)) {
      continue;
    }
    auto param_value = cloned_parameter->param_info();
    if (param_value == nullptr) {
      continue;
    }
    // get the cloned index
    int64_t cloned_index = param_value->cloned_index();
    // find the parameter this one was cloned from
    bool found_be_cloned_parameter = false;
    ParameterPtr cloned_from_parameter = nullptr;
    AnfNodePtr cloned_from_node = nullptr;
    for (auto &be_cloned_parameter_node : root->parameters()) {
      MS_EXCEPTION_IF_NULL(be_cloned_parameter_node);
      auto be_cloned_parameter = be_cloned_parameter_node->cast<ParameterPtr>();
      MS_EXCEPTION_IF_NULL(be_cloned_parameter);
      if (!be_cloned_parameter->has_default()) {
        continue;
      }
      auto param_value_in = be_cloned_parameter->param_info();
      if (param_value_in == nullptr) {
        continue;
      }
      if (!param_value_in->be_cloned()) {
        continue;
      }
      // get the indices under which this parameter was cloned
      auto &be_cloned_index = param_value_in->be_cloned_index();
      if (std::find(be_cloned_index.begin(), be_cloned_index.end(), cloned_index) != be_cloned_index.end()) {
        found_be_cloned_parameter = true;
        cloned_from_parameter = be_cloned_parameter;
        cloned_from_node = be_cloned_parameter_node;
      }
    }
    if (found_be_cloned_parameter) {
      // set the shape and tensor layout for the cloned parameter
      cloned_parameter->set_user_data<TensorLayout>(cloned_from_parameter->user_data<TensorLayout>());
      MS_EXCEPTION_IF_NULL(cloned_parameter_node->abstract());
      MS_EXCEPTION_IF_NULL(cloned_from_node->abstract());
      auto cloned_abstract = cloned_parameter_node->abstract()->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(cloned_from_node->abstract()->GetShapeTrack());
      cloned_parameter_node->set_abstract(cloned_abstract);
      MS_LOG(INFO) << "The parameter: " << cloned_parameter->name()
                   << " is cloned, the source parameter is: " << cloned_from_parameter->name()
                   << ", clone index is: " << cloned_index;
    } else {
      MS_LOG(EXCEPTION) << "The parameter: " << cloned_parameter->name() << " is cloned, cloned index is "
                        << cloned_index << ", but the source parameter was not found";
    }
  }
}
void SetVirtualDatasetStrategy(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  bool full_batch = ParallelContext::GetInstance()->full_batch();
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(node->input(0));
  MS_EXCEPTION_IF_NULL(prim);
  if (prim->name() == VIRTUAL_DATA_SET) {
    CheckGlobalDeviceManager();
    int64_t dev_num;
    if (full_batch) {
      dev_num = 1;
    } else {
      dev_num = SizeToLong(g_device_manager->stage_device_num());
    }
    auto attrs_temp = prim->attrs();
    std::vector<Shapes> shape_list = ExtractShape(node);
    if (shape_list.empty()) {
      MS_LOG(EXCEPTION) << "Failure: node " << node->ToString() << " failed to extract shape";
    }
    std::vector<ValuePtr> elements;
    for (size_t i = 0; i < shape_list[0].size(); i++) {
      if (shape_list[0][i].empty()) {
        MS_LOG(EXCEPTION) << "shape_list[ " << i << " ].size() is zero";
      }
      Dimensions input_strategy = {dev_num};
      for (size_t j = 1; j < shape_list[0][i].size(); j++) {
        input_strategy.push_back(1);
      }
      elements.push_back(MakeValue(input_strategy));
    }
    ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
    attrs_temp[STRATEGY] = strategy;
    (void)prim->SetAttrs(attrs_temp);
  }
}
// find the previous parallel care node.
bool FindPreNodes(const AnfNodePtr &node, vector<std::string> *unique_ids) {
  MS_EXCEPTION_IF_NULL(unique_ids);
  // if the previous node is a parameter, handle it outside.
  if (node->isa<Parameter>()) {
    return false;
  }
  if (!node->isa<CNode>()) {
    return false;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return false;
  }
  ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  if (IsParallelCareNode(cnode) && prim->name() != MAKE_TUPLE && prim->name() != MAKE_LIST) {
    unique_ids->push_back(cnode->UniqueId());
    return true;
  }
  bool find = false;
  for (size_t index = 0; index < cnode->inputs().size(); ++index) {
    if (prim->name() == DEPEND && index != 1) {
      continue;
    }
    if (FindPreNodes(cnode->inputs()[index], unique_ids)) {
      find = true;
      continue;
    }
  }
  return find;
}
void FindLastNodesUniqueId(const std::vector<AnfNodePtr> &all_nodes, std::vector<std::string> *unique_ids) {
  MS_EXCEPTION_IF_NULL(unique_ids);
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    if (prim->name() == RETURN) {
      if (!FindPreNodes(cnode, unique_ids)) {
        MS_LOG(WARNING) << "cannot find the last parallel care node in eval graph";
      }
    }
  }
}
StrategyPtr GenerateBatchParallelStrategy(const OperatorInfoPtr operator_, const PrimitivePtr prim) {
  MS_EXCEPTION_IF_NULL(operator_);
  MS_EXCEPTION_IF_NULL(prim);
  StrategyPtr strategyPtr;
  std::shared_ptr<Strategys> strategy_v_ptr = operator_->GenerateBatchStrategies();
  MS_EXCEPTION_IF_NULL(strategy_v_ptr);
  strategyPtr = NewStrategy(0, *strategy_v_ptr);
  std::vector<ValuePtr> elements;
  for (size_t i = 0; i < strategy_v_ptr->size(); i++) {
    elements.push_back(MakeValue((*strategy_v_ptr)[i]));
  }
  ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
  // display the strategy generated by batch parallel
  auto attrs = prim->attrs();
  attrs[GEN_STRATEGY] = strategy;
  (void)prim->SetAttrs(attrs);
  MS_LOG(INFO) << "prim " << prim->name() << " batch parallel strategy is " << attrs[GEN_STRATEGY]->ToString();
  return strategyPtr;
}
void SetLastNodeStrategy(const StrategyPtr strategyPtr) {
  auto strategys = strategyPtr->GetInputDim();
  for (size_t i = 0; i < strategys.size(); ++i) {
    for (size_t j = 0; j < strategys[i].size(); ++j) {
      strategys[i][j] = 1;
    }
  }
  strategyPtr->ResetInputs(strategys);
}
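// For every parallel-care cnode: extract the input/output shapes, instantiate the
// matching OperatorInfo, then resolve its strategy in order of priority: batch
// parallel for last nodes in eval/predict (when full_batch is off) or when no
// strategy is given, then the user STRATEGY attribute, then the strategy checkpoint;
// finally the operator is initialized with the chosen strategy.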
void ExtractInformation(const std::vector<AnfNodePtr> &all_nodes, bool is_training) {
  // load the strategy map from the checkpoint
  StrategyMap stra_map;
  if (StrategyCheckpoint::GetInstance().LoadCheckPointOn()) {
    if (StrategyCheckpoint::GetInstance().Load(&stra_map) != SUCCESS) {
      MS_LOG(EXCEPTION) << "Load strategy checkpoint failed";
    }
  }
  vector<std::string> last_forward_node_ids;
  if (!is_training) {
    FindLastNodesUniqueId(all_nodes, &last_forward_node_ids);
    MS_LOG(INFO) << "there are " << last_forward_node_ids.size() << " output nodes in eval/predict";
  }
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    SetVirtualDatasetStrategy(cnode);
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    if (prim->name() == MAKE_TUPLE || prim->name() == MAKE_LIST || prim->name() == RECEIVE) {
      continue;
    }
    auto attrs = prim->attrs();
    MS_LOG(INFO) << "extract information: node: " << node->ToString() << " prim " << prim->name();
    if (IsParallelCareNode(cnode)) {
      std::vector<Shapes> shape_list = ExtractShape(cnode);
      if (shape_list.empty()) {
        MS_LOG(EXCEPTION) << "Failure: node " << node->ToString() << " failed to extract shape";
      }
      OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
      if (operator_ == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: Primitive " << prim->name() << " OperatorInstance failed";
      }
      auto &inputs = cnode->inputs();
      std::vector<ValuePtr> input_value;
      for (size_t index = 1; index < inputs.size(); ++index) {
        if (inputs[index]->isa<ValueNode>()) {
          input_value.push_back(GetValueNode(inputs[index]));
        } else {
          input_value.emplace_back(nullptr);
        }
      }
      StrategyPtr strategyPtr = nullptr;
      (*operator_).set_input_value(input_value);
      (*operator_).set_outputs_dtype(cnode->Type());
      (*operator_).set_cnode(cnode);
      if (prim->name() == RESHAPE) {
        cnode->set_user_data<OperatorInfo>(operator_);
        continue;
      }
      // load strategy checkpoint
      // key of the strategy map
      std::string strategy_key_name = "";
      auto param_names = NodeParameterName(cnode);
      if (!param_names.empty()) {
        strategy_key_name = prim->name() + "_" + param_names[0].first;
      }
      bool load_strategy_from_ckpt =
        StrategyCheckpoint::GetInstance().LoadCheckPointOn() && stra_map.find(strategy_key_name) != stra_map.end();
      bool is_last_nodes = std::find(last_forward_node_ids.begin(), last_forward_node_ids.end(), cnode->UniqueId()) !=
                           last_forward_node_ids.end();
      bool full_batch = ParallelContext::GetInstance()->full_batch();
      if ((is_last_nodes && !full_batch) || (!StrategyFound(attrs) && !load_strategy_from_ckpt)) {
        MS_LOG(INFO) << "ExtractInformation: the strategy of node " << node->ToString() << " prim " << prim->name()
                     << " is empty, using batch parallel";
        strategyPtr = GenerateBatchParallelStrategy(operator_, prim);
      } else if (StrategyFound(attrs)) {
        strategyPtr = ExtractStrategy(attrs);
      } else {
        strategyPtr = stra_map[strategy_key_name];
      }
      if (strategyPtr != nullptr) {
        if (is_last_nodes && full_batch) {
          SetLastNodeStrategy(strategyPtr);
        }
        if (operator_->Init(strategyPtr) == FAILED) {
          MS_LOG(EXCEPTION) << "Failure: operator " << prim->name() << " init failed";
        }
        cnode->set_user_data<OperatorInfo>(operator_);
      } else {
        MS_LOG(EXCEPTION) << "ERROR: strategy_ptr is nullptr";
      }
    }
  }
}
TensorLayout GetInputLayoutFromCNode(const std::pair<AnfNodePtr, int64_t> &node_pair) {
  CNodePtr cnode = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  MS_EXCEPTION_IF_NULL(distribute_operator);
  int64_t index = node_pair.second;
  if (index > SizeToLong(distribute_operator->inputs_tensor_info().size())) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << index - 1 << ", the vector size is "
                      << distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[LongToSize(index - 1)];
  TensorLayout tensorlayout_in = tensorinfo_in.tensor_layout();
  return tensorlayout_in;
}
// if reshape's output connects to several primitives, return the first layout found
std::shared_ptr<TensorLayout> FindNextLayout(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  MS_EXCEPTION_IF_NULL(cnode->func_graph());
  FuncGraphManagerPtr manager = cnode->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[cnode];
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    MS_LOG(INFO) << "FindNextLayout prim " << node_prim->name();
    if (node_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_apply) && use_apply->has_user_data<OperatorInfo>()) {
      MS_LOG(INFO) << "FindNextLayout success prim " << node_prim->name();
      auto layout = GetInputLayoutFromCNode(node_pair);
      return std::make_shared<TensorLayout>(layout);
    }
    MS_LOG(DEBUG) << "FindNextLayout failed prim " << node_prim->name() << " " << IsParallelCareNode(use_apply)
                  << " " << use_apply->has_user_data<OperatorInfo>();
    auto layout_ptr = FindNextLayout(use_apply);
    if (layout_ptr) {
      return layout_ptr;
    }
  }
  MS_LOG(WARNING) << "FindNextLayout returned nullptr; if reshape is not the last primitive, there must be some error";
  return nullptr;
}
std::shared_ptr<TensorLayout> GetOutputLayoutFromCNode(const CNodePtr &cnode, size_t output_index) {
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  MS_EXCEPTION_IF_NULL(distribute_operator);
  if (distribute_operator->outputs_tensor_info().size() < output_index) {
    MS_LOG(EXCEPTION) << "outputs_tensor_info size is " << distribute_operator->outputs_tensor_info().size()
                      << ", which is smaller than the output_index " << output_index;
  }
  TensorInfo tensorinfo_out = distribute_operator->outputs_tensor_info()[output_index];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  return std::make_shared<TensorLayout>(tensorlayout_out);
}
std::shared_ptr<TensorLayout> FindPrevParallelCareNodeLayout(const AnfNodePtr &node, size_t output_index) {
  if (!node->isa<CNode>()) {
    return nullptr;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return nullptr;
  }
  if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>()) {
    auto layout_ptr = GetOutputLayoutFromCNode(cnode, output_index);
    if (!layout_ptr) {
      MS_LOG(EXCEPTION) << "Failure: GetOutputLayoutFromCNode failed";
    }
    return layout_ptr;
  }
  return nullptr;
}
std::shared_ptr<TensorLayout> FindParameterNextLayout(const AnfNodePtr &node) {
  FuncGraphManagerPtr manager = node->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    if (IsPrimitiveCNode(node_pair.first, prim::kPrimLoad)) {
      auto layout_param = FindParameterNextLayout(node_pair.first);
      if (!layout_param) {
        continue;
      }
      return layout_param;
    }
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if ((node_prim->name() == DEPEND && node_pair.second != 1) || node_prim->name() == RESHAPE) {
      continue;
    }
    if (IsParallelCareNode(use_apply) && use_apply->has_user_data<OperatorInfo>()) {
      auto layout = GetInputLayoutFromCNode(node_pair);
      return std::make_shared<TensorLayout>(layout);
    }
  }
  return nullptr;
}
std::shared_ptr<TensorLayout> CreateParameterLayout(const AnfNodePtr &node) {
  // Create a DataParallel tensor layout for the parameter (supports WideDeep).
  auto next_layout = FindParameterNextLayout(node);
  if (next_layout != nullptr) {
    return next_layout;
  }
  CheckGlobalDeviceManager();
  int64_t dev_num = g_device_manager->stage_device_num();
  TensorLayout input_tensor_layout;
  // create input_shape
  Shapes inputs_shape = GetNodeShape(node);
  Shape input_shape_array = inputs_shape[0];
  if (input_shape_array.empty()) {
    MS_LOG(EXCEPTION) << "Reshaping a scalar parameter is not supported.";
  }
  // create tensor_map
  size_t shape_size = input_shape_array.size();
  TensorMap input_tensor_map_array(SizeToLong(shape_size) - 1, -1);
  input_tensor_map_array.insert(input_tensor_map_array.begin(), 0);
  // create dev_matrix
  Shape dev_matrix_array = {dev_num};
  if (input_tensor_layout.InitFromVector(dev_matrix_array, input_tensor_map_array, input_shape_array) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Create tensor layout for parameter failed.";
  }
  return std::make_shared<TensorLayout>(input_tensor_layout);
}
RedistributionOpListPtr InferSensRedistribution(const AnfNodePtr &node, const TensorLayout &loss_layout) {
  MS_EXCEPTION_IF_NULL(node);
  TensorRedistribution tensor_redistribution;
  // create a stand-alone layout: TensorMap: [all -1], dev_matrix: [dev_num].
  CheckGlobalDeviceManager();
  int64_t dev_num = g_device_manager->stage_device_num();
  TensorLayout stand_alone_layout;
  Shapes inputs_shape = GetNodeShape(node);
  if (inputs_shape.empty()) {
    MS_LOG(EXCEPTION) << "InferSensRedistribution failed because the inputs shape is empty.";
  }
  Shape input_shape_array = inputs_shape[0];
  if (input_shape_array.empty()) {
    MS_LOG(INFO) << "No need to redistribute the sens.";
    return nullptr;
  }
  // TensorMap
  TensorMap stand_alone_tensor_map_array(SizeToLong(input_shape_array.size()), -1);
  // Dev_matrix
  Shape dev_matrix_array = {dev_num};
  if (stand_alone_layout.InitFromVector(dev_matrix_array, stand_alone_tensor_map_array, input_shape_array) == FAILED) {
    MS_LOG(EXCEPTION) << "Create tensor layout for Sens failed.";
  }
  // Infer the redistribution op list between the stand-alone layout and the loss layout.
  RankList dev_list = g_device_manager->GetDeviceListInThisStage();
  if (tensor_redistribution.Init(stand_alone_layout, loss_layout, dev_list) == FAILED) {
    MS_LOG(EXCEPTION) << "Redistribution for Sens init failed.";
  }
  RedistributionOpListPtr sens_redistribution_list = tensor_redistribution.InferTensorRedistributionOperatorList();
  MS_EXCEPTION_IF_NULL(sens_redistribution_list);
  return sens_redistribution_list;
}
std::shared_ptr<TensorLayout> FindPrevLayout(const AnfNodePtr &node) {
  if (node->isa<Parameter>()) {
    return CreateParameterLayout(node);
  }
  if (!node->isa<CNode>()) {
    return nullptr;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return nullptr;
  }
  if (IsParallelCareNode(cnode) && cnode->has_user_data<OperatorInfo>() &&
      !IsPrimitiveCNode(node, prim::kPrimReshape)) {
    auto layout_ptr = GetOutputLayoutFromCNode(cnode, 0);
    if (!layout_ptr) {
      MS_LOG(EXCEPTION) << "Failure: GetOutputLayoutFromCNode failed";
    }
    return layout_ptr;
  }
  ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  if (prim->name() == prim::kTupleGetItem) {
    auto tuple_index = GetTupleGetItemIndex(cnode);
    auto layout_ptr = FindPrevParallelCareNodeLayout(cnode->input(1), LongToSize(tuple_index));
    if (!layout_ptr) {
      MS_LOG(EXCEPTION)
        << "Failure: FindPrevLayout failed, there is a tuple_getitem before reshape, but there does not exist a "
           "parallel care node before tuple_getitem!";
    }
    return layout_ptr;
  }
  for (size_t index = 0; index < cnode->inputs().size(); ++index) {
    if (prim->name() == DEPEND && index != 1) {
      continue;
    }
    auto layout_ptr = FindPrevLayout(cnode->inputs()[index]);
    if (!layout_ptr) {
      continue;
    }
    return layout_ptr;
  }
  MS_LOG(WARNING) << "FindPrevLayout returned nullptr; if reshape is not the first primitive, there must be some error";
  return nullptr;
}
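// Reshape carries no strategy of its own: its input layout comes from the previous
// parallel-care node (FindPrevLayout) and its output layout from the next one
// (FindNextLayout); ReshapeInfo is then initialized with a null strategy.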
void ReshapeInit(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>()) {
      continue;
    }
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->user_data<OperatorInfo>();
    if (operator_info == nullptr) {
      MS_LOG(EXCEPTION) << "Failure: Primitive " << prim->ToString() << " OperatorInstance is nullptr";
    }
    if (prim->name() != RESHAPE) {
      continue;
    }
    auto attrs = prim->attrs();
    if (StrategyFound(attrs)) {
      MS_LOG(EXCEPTION) << "Setting a strategy for Reshape has no effect!";
    }
    MS_ASSERT(cnode->inputs().size() == 3);
    auto prev_layout_ptr = FindPrevLayout(cnode->input(1));
    if (prev_layout_ptr) {
      auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
      reshape_info_ptr->SetInputLayout(*prev_layout_ptr);
    }
    auto next_layout_ptr = FindNextLayout(cnode);
    if (next_layout_ptr) {
      auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
      reshape_info_ptr->SetOutputLayout(*next_layout_ptr);
    }
    if (operator_info->Init(nullptr) == FAILED) {
      MS_LOG(EXCEPTION) << "Failure: operator " << prim->ToString() << " init failed";
    }
  }
}
CNodePtr HandleDependLoss(const CNodePtr &cnode) {
  // Handle return->depend->loss
  auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
  MS_EXCEPTION_IF_NULL(prim);
  if (prim->name() == DEPEND) {
    auto depend_before = cnode->input(1)->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(depend_before);
    return HandleDependLoss(depend_before);
  }
  return cnode;
}
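// Locates the loss cnode by walking back from the graph's return node, skipping Cast
// and Depend wrappers and descending through tuple_getitem; records whether a
// tuple_getitem was crossed and which output index carries dout.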
LossNodeInfo FindLossCNode(const FuncGraphPtr &func_graph) {
  LossNodeInfo loss_node_info;
  MS_EXCEPTION_IF_NULL(func_graph);
  CNodePtr return_node = func_graph->get_return();
  MS_EXCEPTION_IF_NULL(return_node);
  if (return_node->size() < 2) {
    MS_LOG(EXCEPTION) << "Failure: " << return_node->ToString() << " size is smaller than 2";
  }
  AnfNodePtr pre_node = return_node->input(1);
  MS_EXCEPTION_IF_NULL(pre_node);
  auto pre_cnode = pre_node->cast<CNodePtr>();
  if (pre_cnode == nullptr || !IsValueNode<Primitive>(pre_cnode->input(0))) {
    MS_LOG(DEBUG) << "The node before return is not a primitive cnode";
    return loss_node_info;
  }
  auto prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  // return -> cast
  if (prim->name() == CAST && !pre_cnode->has_user_data<OperatorInfo>()) {
    pre_cnode = pre_cnode->input(1)->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(pre_cnode);
  }
  pre_cnode = HandleDependLoss(pre_cnode);
  auto current_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  // notice: the GetNext op has no input
  if (INVALID_LOSS_OPS.find(current_prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(INFO) << "The loss is: " << current_prim->name();
    loss_node_info.loss_node = pre_cnode;
    return loss_node_info;
  }
  // the size of a common cnode is larger than 1
  if (pre_cnode->size() < 2) {
    MS_LOG(EXCEPTION) << pre_cnode->ToString() << " size( " << pre_cnode->inputs().size() << " ) is smaller than 2";
  }
  // return -> tuple_getitem -> loss
  if (current_prim->name() == prim::kTupleGetItem) {
    auto tuple_index = GetTupleGetItemIndex(pre_cnode);
    AnfNodePtr pre_pre_node = pre_cnode->input(1);
    MS_EXCEPTION_IF_NULL(pre_pre_node);
    auto pre_pre_cnode = pre_pre_node->cast<CNodePtr>();
    loss_node_info.has_tuple_getitem = true;
    loss_node_info.dout_index = tuple_index;
    loss_node_info.loss_node = pre_pre_cnode;
    return loss_node_info;
  }
  // return -> make_tuple
  if (current_prim->name() == MAKE_TUPLE) {
    MS_LOG(WARNING) << "The loss contains make_tuple, which is not supported";
    return loss_node_info;
  }
  // return -> loss
  loss_node_info.loss_node = pre_cnode;
  MS_LOG(DEBUG) << "The loss name is " << current_prim->name();
  return loss_node_info;
}
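// Get the tensor layout of the loss node's output gradient (dout), which is
// later used to split the sens tensor. Returns an empty layout list for the
// loss ops in INVALID_LOSS_OPS, which cannot provide one.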
TensorLayouts GetLossNodeGradOutputLayout(const LossNodeInfo &node_info) {
  TensorLayouts ret;
  auto loss_cnode = node_info.loss_node;
  MS_EXCEPTION_IF_NULL(loss_cnode);
  ValueNodePtr prim_anf_node = loss_cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(prim_anf_node);
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(prim);
  if (INVALID_LOSS_OPS.find(prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(WARNING) << "The loss name is: " << prim->name() << ", do nothing for split sens now";
    return ret;
  }
  OperatorInfoPtr operator_info = loss_cnode->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(operator_info);
  TensorInfo loss_grad_tensor_info;
  size_t op_output_size = operator_info->outputs_tensor_info().size();
  MS_LOG(INFO) << "The loss name is " << operator_info->name() << ", has_tuple_getitem is "
               << node_info.has_tuple_getitem << ", the output size is " << op_output_size << ", the dout_index is "
               << node_info.dout_index;
  if ((op_output_size == 0) || (op_output_size <= LongToSize(node_info.dout_index))) {
    MS_LOG(EXCEPTION) << "The index is " << node_info.dout_index << ", but the size of outputs is " << op_output_size;
  }
  if (!node_info.has_tuple_getitem && (op_output_size > 1)) {
    MS_LOG(EXCEPTION) << "Currently, it is not supported that the sens is a tuple.";
  }
  loss_grad_tensor_info = operator_info->outputs_tensor_info()[LongToSize(node_info.dout_index)];
  ret.push_back(loss_grad_tensor_info.tensor_layout());
  return ret;
}
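// Split the sens (gradient seed) tensor according to the loss output layout:
// scalar-like sens tensors only have the layout recorded, Parameter sens nodes
// get a sliced abstract shape, CNode sens nodes get redistribution operators,
// and constant tensors are sliced with the _GetTensorSlice operator.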
void SplitSens(const CNodePtr &grad_sens_node, const TensorLayout &loss_grad_layout) {
  MS_EXCEPTION_IF_NULL(grad_sens_node);
  if (grad_sens_node->size() <= 1) {
    MS_LOG(EXCEPTION) << "The size of grad sens node is smaller than 2";
  }
  AnfNodePtr sens_tensor_node = grad_sens_node->input(1);
  MS_EXCEPTION_IF_NULL(sens_tensor_node);
  Shapes sens_shapes = GetNodeShape(sens_tensor_node);
  if (sens_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "GetNodeShape for sens_tensor_node, output size is not 1";
  }
  // If the shape of the sens tensor is [] or [1], there is no need to split it.
  Shape sens_shape = sens_shapes[0];
  if (sens_shape.empty() || ((sens_shape.size() == 1) && (sens_shape[0] == 1))) {
    if (sens_tensor_node->isa<Parameter>()) {
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      sens_tensor_param->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(loss_grad_layout));
    }
    MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", no need to split sens";
    return;
  }
  auto loss_shape = loss_grad_layout.tensor_shape().array();
  if (loss_shape != sens_shape) {
    MS_LOG(EXCEPTION) << "The shape of sens is not equal to the loss output, which is unsupported now. Sens shape is "
                      << ShapeToString(sens_shape) << ", loss shape is " << ShapeToString(loss_shape);
  }
  MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", split it.";
  if (!IsValueNode<Tensor>(sens_tensor_node)) {
    if (sens_tensor_node->isa<Parameter>()) {
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      AbstractBasePtr abstract = sens_tensor_node->abstract();
      MS_EXCEPTION_IF_NULL(abstract);
      auto slice_shape = loss_grad_layout.slice_shape().array();
      std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
      MS_EXCEPTION_IF_NULL(parallel_shape);
      auto cloned_abstract = abstract->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(parallel_shape);
      sens_tensor_node->set_abstract(cloned_abstract);
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      sens_tensor_param->set_user_data<TensorLayout>(std::make_shared<TensorLayout>(loss_grad_layout));
      return;
    }
    if (sens_tensor_node->isa<CNode>()) {
      auto op_list_ptr = InferSensRedistribution(sens_tensor_node, loss_grad_layout);
      if (op_list_ptr == nullptr) {
        return;
      }
      auto sens_tensor_cnode = sens_tensor_node->cast<CNodePtr>();
      auto func_graph = grad_sens_node->func_graph();
      MS_EXCEPTION_IF_NULL(func_graph);
      InsertRedistribution(op_list_ptr, grad_sens_node, func_graph, 1, sens_tensor_cnode);
      return;
    }
    MS_LOG(EXCEPTION) << "The type of sens node is not Tensor, Parameter or CNode, which is unsupported now.";
  }
  // Use the _GetTensorSlice operator to split the sens tensor
  FuncGraphPtr func_graph = grad_sens_node->func_graph();  // only a cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(loss_grad_layout);
  InsertGetTensorSliceOp(op, grad_sens_node, func_graph, 1, SPLIT_SENS);
}
void InsertForwardOps(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorVector forward_op = distribute_operator->forward_op();
  if (!forward_op.empty()) {
    MS_LOG(INFO) << "Insert forward op for " << distribute_operator->name();
    ForwardCommunication(forward_op, cnode);
  }
}
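// Replace the cnode with the operator's replace_op sequence or its replace
// graph; the two replacement mechanisms are mutually exclusive.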
void StepReplace(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  // StepReplaceOp
  OperatorVector replace_op = distribute_operator->replace_op();
  if (!replace_op.empty()) {
    MS_LOG(INFO) << "StepReplaceOp " << cnode->ToString();
    StepReplaceOp(replace_op, cnode);
  }
  // StepReplaceGraph: after calling StepReplaceGraph, the cnode can not be used anymore.
  ReplaceGraphPtr replace_graph = distribute_operator->replace_graph(cnode);
  if (!replace_op.empty() && replace_graph) {
    MS_LOG(EXCEPTION) << "Only one of replace_op or replace_graph can be used";
  }
  if (replace_graph) {
    MS_LOG(INFO) << "StepReplaceGraph " << cnode->ToString();
    StepReplaceGraph(replace_graph, cnode);
  }
}
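// For DropoutDoMask cnodes, replace the paired DropoutGenMask op with the
// replace op returned by GetDropoutGenMaskReplaceOp, presumably so that the
// mask is generated with the sliced shape instead of the full shape.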
void HandleDropoutNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  std::string op_name = distribute_operator->name();
  if (op_name.find(DROPOUT_DO_MASK) == std::string::npos) {
    return;
  }
  DropoutDoMaskInfoPtr dropout_do_mask = std::dynamic_pointer_cast<DropoutDoMaskInfo>(distribute_operator);
  MS_EXCEPTION_IF_NULL(dropout_do_mask);
  std::vector<Operator> replace_op = dropout_do_mask->GetDropoutGenMaskReplaceOp(cnode);
  if (replace_op.empty()) {
    MS_LOG(DEBUG) << "No need to replace dropout_gen_mask";
    return;
  }
  if (cnode->inputs().size() != DROPOUT_DO_MASK_CNODE_INPUT_SIZE) {
    MS_LOG(EXCEPTION) << "The size of the DropoutDoMask cnode's inputs is not " << DROPOUT_DO_MASK_CNODE_INPUT_SIZE;
  }
  ReplaceOneOp(replace_op[0], cnode->input(DROPOUT_GEN_MASK_INDEX)->cast<CNodePtr>());
}
void HandleTileNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  if (cnode->size() < 3 || !IsValueNode<Primitive>(cnode->input(0))) {
    return;
  }
  auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
  if (prim->name() != TILE) {
    return;
  }
  TileInfoPtr tile = std::dynamic_pointer_cast<TileInfo>(distribute_operator);
  MS_EXCEPTION_IF_NULL(tile);
  tile->UpdateMultiples(cnode);
}
void HandleSpecialNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  HandleDropoutNode(distribute_operator, cnode);
  HandleTileNode(distribute_operator, cnode);
}
std::set<FuncGraphPtr> FindForwardGraphByRootNodes(const AnfNodeSet &root_all_nodes) {
  // J->CNode->Graph
  std::set<FuncGraphPtr> graph_set;
  for (auto &node : root_all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if ((cnode->size() < 2) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    auto expect_j_prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    if (expect_j_prim->name() != J) {
      continue;
    }
    if (IsValueNode<FuncGraph>(cnode->input(1))) {
      auto graph = GetValueNode<FuncGraphPtr>(cnode->input(1));
      MS_LOG(DEBUG) << "Found the forward graph";
      graph_set.insert(graph);
      auto manager = graph->manager();
      MS_EXCEPTION_IF_NULL(manager);
      auto graph_used = manager->func_graphs_used_total(graph);
      for (auto &sub_graph : graph_used) {
        graph_set.insert(sub_graph);
      }
    }
  }
  return graph_set;
}
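// Split one sens node according to the layout of its loss node's output
// gradient; does nothing if no layout can be determined.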
void StepSplitSens(const std::pair<CNodePtr, LossNodeInfo> &sens_loss_pair) {
  CNodePtr sens_node = sens_loss_pair.first;
  auto loss_node = sens_loss_pair.second;
  auto loss_grad_layout = GetLossNodeGradOutputLayout(loss_node);
  if (!loss_grad_layout.empty()) {
    SplitSens(sens_node, loss_grad_layout[0]);
  }
}
// A sens node satisfies the following pattern: cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
std::vector<std::pair<CNodePtr, LossNodeInfo>> GetSensLossPairs(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  std::vector<std::pair<CNodePtr, LossNodeInfo>> sens_loss_pairs;
  for (auto &node : root->nodes()) {
    if (!node->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)
    auto sens_cnode = node->cast<CNodePtr>();
    AnfNodePtr expect_tuple_getitem = sens_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_tuple_getitem);
    if (!expect_tuple_getitem->isa<CNode>()) {
      continue;
    }
    auto expect_tuple_getitem_cnode = expect_tuple_getitem->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_tuple_getitem_cnode, prim::kTupleGetItem)) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode
    AnfNodePtr expect_anonymous = expect_tuple_getitem_cnode->input(1);
    MS_EXCEPTION_IF_NULL(expect_anonymous);
    if (!expect_anonymous->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
    auto expect_anonymous_cnode = expect_anonymous->cast<CNodePtr>();
    AnfNodePtr expect_j = expect_anonymous_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_j);
    if (!expect_j->isa<CNode>()) {
      continue;
    }
    auto expect_j_cnode = expect_j->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_j_cnode, J)) {
      continue;
    }
    if (!IsValueNode<FuncGraph>(expect_j_cnode->input(1))) {
      MS_LOG(EXCEPTION) << "Sens can't find the corresponding graph.";
    }
    auto func_graph = GetValueNode<FuncGraphPtr>(expect_j_cnode->input(1));
    auto loss_node_info = FindLossCNode(func_graph);
    if (loss_node_info.loss_node == nullptr) {
      MS_LOG(WARNING) << "Cannot find the loss cnode";
      continue;
    }
    std::pair<CNodePtr, LossNodeInfo> sens_loss_pair = std::make_pair(sens_cnode, loss_node_info);
    sens_loss_pairs.push_back(sens_loss_pair);
  }
  return sens_loss_pairs;
}
bool IsLastStage() {
  MS_EXCEPTION_IF_NULL(g_device_manager);
  auto stage_num = g_device_manager->stage_num();
  auto stage_id = g_device_manager->stage_id();
  return ((stage_num - 1) == stage_id);
}
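// The main insertion pass: split the sens nodes, then for every parallel-care
// cnode insert forward communication, redistribution and backward
// communication operators, and finally apply the operators' replace ops or
// replace graphs in a second sweep (replacement invalidates the cnode, so it
// must come last).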
void ParallelCommunication(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes,
                           const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(manager);
  TensorRedistribution tensor_redistribution;
  std::vector<std::pair<CNodePtr, LossNodeInfo>> sens_loss_pairs = GetSensLossPairs(root);
  bool has_backward = !sens_loss_pairs.empty();
  // sens must be split before inserting the operators.
  for (auto &pair : sens_loss_pairs) {
    // If the shape of the grad-sens tensor is not [] or [1], use get tensor slice to handle it.
    // If the type of the sens node is not Tensor, it is unsupported now; do nothing by default.
    if (IsLastStage()) {
      StepSplitSens(pair);
    }
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      // make_tuple is a parallel-care node, but it may not have operator info
      if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>()) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      MS_EXCEPTION_IF_NULL(distribute_operator);
      // insert forward ops
      if (!IsSomePrimitive(cnode, RECEIVE)) {
        InsertForwardOps(distribute_operator, cnode);
      }
      // insert redistribution ops
      StepRedistribution(cnode, distribute_operator, cnode, tensor_redistribution, cnode);
      // insert backward ops
      if (has_backward && !IsSomePrimitive(cnode, RECEIVE)) {
        BackwardCommunication(root, distribute_operator, cnode, sens_loss_pairs);
      }
      HandleSpecialNode(distribute_operator, cnode);
    } else if (IsValueNode<Tensor>(node) || IsValueNode<ValueList>(node) || IsValueNode<ValueTuple>(node)) {
      StepSplitTensor(node, manager);
    }
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      if (!IsParallelCareNode(cnode) || !cnode->has_user_data<OperatorInfo>() || IsSomePrimitive(cnode, RECEIVE)) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      MS_EXCEPTION_IF_NULL(distribute_operator);
      // StepReplace
      StepReplace(distribute_operator, cnode);
    }
  }
}
namespace {
void RevertSymbolicKeyInstance(const FuncGraphPtr &root, const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(node);
  auto symbolic_key = GetValueNode<SymbolicKeyInstancePtr>(node);
  MS_EXCEPTION_IF_NULL(symbolic_key);
  auto all_upstream_node = root->manager()->node_users()[node];
  for (auto &upstream_node : all_upstream_node) {
    FuncGraphPtr fg = upstream_node.first->func_graph();
    if (symbolic_key->node()->isa<Parameter>()) {
      for (auto &param : root->parameters()) {
        if (*param == *symbolic_key->node()) {
          AnfNodePtr reverted_node = root->NewCNode({NewValueNode(prim::kPrimEmbed), param});
          MS_EXCEPTION_IF_NULL(reverted_node);
          MS_LOG(DEBUG) << "before replace " << node->ToString() << " to node " << reverted_node->DebugString();
          (void)fg->manager()->Replace(node, reverted_node);
          MS_LOG(DEBUG) << "revert node " << node->ToString() << " to node " << reverted_node->DebugString();
        }
      }
    }
  }
}
}  // namespace
void HandleSymbolicKeyInstance(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &node : all_nodes) {
    // revert SymbolicKeyInstance back to the embed() primitive
    if (IsValueNode<SymbolicKeyInstance>(node)) {
      RevertSymbolicKeyInstance(root, node);
      continue;
    }
  }
}
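// Collect the (name, input index) pairs of the trainable parameters used by a
// cnode, looking through a leading Cast on each input.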
std::vector<std::pair<std::string, int64_t>> NodeParameterName(const CNodePtr &node) {
  std::vector<AnfNodePtr> node_inputs{node->inputs()};
  std::vector<std::pair<std::string, int64_t>> param_names;
  for (int64_t i = 0; i < UlongToLong(node_inputs.size()); ++i) {
    auto input = node_inputs[i];
    if (input->isa<Parameter>()) {
      auto input_parameter = input->cast<ParameterPtr>();
      if (input_parameter->has_default() && ParameterRequireGrad(input_parameter)) {
        param_names.push_back({input_parameter->name(), i});
      }
    } else if (input->isa<CNode>()) {
      CNodePtr cnode = input->cast<CNodePtr>();
      if (!IsValueNode<Primitive>(cnode->input(0))) {
        return param_names;
      }
      ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
      PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
      // inputs()[1] is only valid if the Cast cnode has at least two inputs
      if (prim->name() == CAST && cnode->inputs().size() > 1) {
        auto cast_input = cnode->inputs()[1];
        if (cast_input->isa<Parameter>()) {
          auto cast_input_parameter = cast_input->cast<ParameterPtr>();
          if (cast_input_parameter->has_default() && ParameterRequireGrad(cast_input_parameter)) {
            param_names.push_back({cast_input_parameter->name(), i});
          }
        }
      }
    }
  }
  return param_names;
}
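// Save the parallel strategies, the tensor layouts of parameters and the
// manual-split shapes (for EmbeddingLookup/GatherV2) into the strategy
// checkpoint, keyed by primitive name and parameter name.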
void CheckpointStrategy(const std::vector<AnfNodePtr> &all_nodes) {
  StrategyMap stra_map;
  TensorInfoMap tensor_info_map;
  ManualShapeMap manual_shape_map;
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    auto param_names = NodeParameterName(cnode);
    if (param_names.empty()) {
      continue;
    }
    string param_name = param_names[0].first;
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->user_data<OperatorInfo>();
    if (operator_info) {
      if (operator_info->name().find(RESHAPEINFO) != std::string::npos) {
        continue;
      }
      std::vector<TensorInfo> input_tensor_info = operator_info->inputs_tensor_info();
      std::string strategy_key_name = prim->name() + "_" + param_name;
      stra_map[strategy_key_name] = operator_info->strategy();
      for (auto param_name_pair : param_names) {
        if (param_name_pair.second - 1 >= UlongToLong(input_tensor_info.size())) {
          continue;
        }
        tensor_info_map[param_name_pair.first] = input_tensor_info[param_name_pair.second - 1];
      }
      if (operator_info->name().find(EMBEDDING_LOOKUP) != std::string::npos ||
          operator_info->name().find(GATHERV2) != std::string::npos) {
        auto gatherv2_info = std::dynamic_pointer_cast<GatherPInfo>(operator_info);
        auto param_split_shapes = gatherv2_info->param_split_shapes();
        auto index_offsets = gatherv2_info->index_offsets();
        if (param_split_shapes.size() != index_offsets.size()) {
          MS_LOG(EXCEPTION) << "In manual split, the lengths of param_split_shapes and index_offsets must be equal.";
        }
        std::vector<std::pair<int64_t, int64_t>> manual_shape;
        for (int64_t i = 0; i < UlongToLong(param_split_shapes.size()); ++i) {
          manual_shape.push_back({param_split_shapes[i], index_offsets[i]});
        }
        manual_shape_map[param_name] = manual_shape;
      }
    }
  }
  if (StrategyCheckpoint::GetInstance().Save(stra_map, tensor_info_map, &manual_shape_map) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Save strategy checkpoint failed";
  }
}
void SetForwardFlag(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    MS_LOG(DEBUG) << "Set forward flag " << cnode->DebugString() << ".";
    cnode->set_in_forward_flag(true);
  }
}
void SetForwardFlag(const AnfNodeSet &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    cnode->set_in_forward_flag(true);
  }
}
std::set<FuncGraphPtr> ForwardGraph(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  const auto &all_nodes = root->nodes();
  std::set<FuncGraphPtr> graph_set = FindForwardGraphByRootNodes(all_nodes);
  return graph_set;
}
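// Find the forward cnodes of the root graph by locating the loss cnode and
// collecting everything reachable from it through a linked-graph search.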
std::vector<AnfNodePtr> FindRootForwardCNode(const FuncGraphPtr &graph, const AnfNodeSet &all_nodes) {
  MS_EXCEPTION_IF_NULL(graph);
  std::vector<AnfNodePtr> root_forward_nodes;
  auto loss_cnode = FindLossCNode(graph).loss_node;
  if (loss_cnode == nullptr) {
    MS_LOG(WARNING) << "Cannot find the loss cnode";
    return root_forward_nodes;
  }
  auto loss_cnode_id = loss_cnode->UniqueIdThroughCopy();
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    auto root_node_id = node->UniqueIdThroughCopy();
    if (loss_cnode_id == root_node_id) {
      root_forward_nodes = DeepLinkedGraphSearch(cnode);
      break;
    }
  }
  return root_forward_nodes;
}
void InsertShapeOp(const CNodePtr &node, const AnfNodePtr &pre_node, const FuncGraphPtr &root) {
  // shape op doesn't have params and attrs.
  OperatorParams params;
  OperatorAttrs attrs;
  auto shape_value = GetValueNode(node->input(2))->cast<ValueSequeuePtr>();
  MS_EXCEPTION_IF_NULL(shape_value);
  auto shape = shape_value->value();
  if (shape.empty()) {
    return;
  }
  OperatorArgs args = std::make_pair(attrs, params);
  Operator op = std::make_pair(SHAPE_OP, args);
  InsertNode(op, node, 2, pre_node, root, "shape");
}
static AnfNodePtr FindGrad(const CNodePtr &cnode) {
  for (auto &node : cnode->inputs()) {
    if (!node->isa<CNode>()) {
      continue;
    }
    if (!IsPrimitiveCNode(node, prim::kPrimEnvGetItem)) {
      return FindGrad(node->cast<CNodePtr>());
    } else {
      return node;
    }
  }
  return nullptr;
}
void HandleRootReshapeAndSaveStrategy(const std::vector<AnfNodePtr> &all_nodes) {
  // If the root graph has a Reshape op, find the corresponding parameter:
  // Reshape's shape is the shape of the parameter.
  auto executor = pipeline::ExecutorPy::GetInstance();
  for (auto &node : all_nodes) {
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (cnode == nullptr || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    if (cnode->in_forward_flag()) {
      // Save the strategy in the executor
      OperatorInfoPtr op_info = cnode->user_data<OperatorInfo>();
      if (op_info) {
        auto stra_ptr = op_info->strategy();
        if (stra_ptr) {
          auto strategy = stra_ptr->GetInputDim();
          // the fullname with scope should be found in the step-parallel-end ir
          executor->SetCNodeStrategy(cnode->fullname_with_scope(), strategy);
        }
      }
      continue;
    }
    auto prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    if (prim->name() != RESHAPE) {
      continue;
    }
    auto root = node->func_graph();
    auto grad_node = FindGrad(cnode);
    if (grad_node) {
      InsertShapeOp(cnode, grad_node, root);
    }
  }
}
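// Mark the forward cnodes: if no forward graph hangs under a J node, mark the
// whole root graph; otherwise mark each forward graph plus the forward cnodes
// that live in the root graph itself.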
void MarkForwardCNode(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto all_nodes = root->nodes();
  auto graph_set = FindForwardGraphByRootNodes(all_nodes);
  if (graph_set.empty()) {
    MS_LOG(INFO) << "Cannot find the forward graph, so mark the ops in the root graph";
    SetForwardFlag(all_nodes);
  } else {
    for (auto &func_graph : graph_set) {
      MS_LOG(INFO) << "The sub graph size of root is " << root->func_graphs_used().size();
      auto return_node = func_graph->get_return();
      MS_EXCEPTION_IF_NULL(return_node);
      auto all_dfs_nodes = DeepLinkedGraphSearch(return_node);
      SetForwardFlag(all_dfs_nodes);
      auto root_forward_nodes = FindRootForwardCNode(func_graph, all_nodes);
      if (root_forward_nodes.empty()) {
        continue;
      }
      // Mark the forward flag for the nodes in the root graph.
      SetForwardFlag(root_forward_nodes);
    }
  }
}
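// Initialize the device manager from the parallel context: resolve the
// communication backend and world group, fetch the device num and global rank
// from the communication interface when they are not set explicitly, validate
// them, and build the pipeline stages.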
Status ParallelInit() {
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  int64_t device_num = ParallelContext::GetInstance()->device_num();
  int64_t global_rank = ParallelContext::GetInstance()->global_rank();
  int32_t split_stage_num = ParallelContext::GetInstance()->pipeline_stage_split_num();
  std::string parallel_mode = ParallelContext::GetInstance()->parallel_mode();
  auto ms_context = MsContext::GetInstance();
  MS_EXCEPTION_IF_NULL(ms_context);
  std::string backend = ms_context->get_param<std::string>(MS_CTX_DEVICE_TARGET);
  std::string world_group;
  std::string communication_backend;
  if (backend == kAscendDevice || backend == kDavinciDevice) {
    world_group = HCCL_WORLD_GROUP;
    communication_backend = HCCL_BACKEND;
  } else if (backend == kGPUDevice) {
    world_group = NCCL_WORLD_GROUP;
    communication_backend = NCCL_BACKEND;
  } else {
    MS_LOG(ERROR) << "Invalid communication backend: " << backend;
    return FAILED;
  }
  if (split_stage_num <= 0) {
    MS_LOG(ERROR) << "Invalid stage num " << split_stage_num << ", expected a positive stage number";
    return FAILED;
  }
  uint32_t world_rank_size = 0;
  if (!ParallelContext::GetInstance()->device_num_is_set()) {
    if (!CommManager::GetInstance().GetRankSize(world_group, &world_rank_size)) {
      MS_LOG(EXCEPTION) << "Get rank size failed";
    }
    device_num = UintToInt(world_rank_size);
    MS_LOG(INFO) << "Get device num from the communication interface, the device num is " << device_num;
  }
  uint32_t rank_id = 0;
  if (!ParallelContext::GetInstance()->global_rank_is_set()) {
    if (!CommManager::GetInstance().GetRankID(world_group, &rank_id)) {
      MS_LOG(EXCEPTION) << "Get rank id failed";
    }
    global_rank = UintToInt(rank_id);
    MS_LOG(INFO) << "Get global rank from the communication interface, the global rank is " << global_rank;
  }
  if ((device_num <= 0) || (device_num > MAX_DEVICE_NUM)) {
    MS_LOG(ERROR) << "Invalid device num " << device_num;
    return FAILED;
  }
  // the device_num may be obtained from the communication interface
  if (device_num % split_stage_num != 0) {
    MS_LOG(ERROR) << "Device num " << device_num << " can't be divided by stage num " << split_stage_num;
    return FAILED;
  }
  if ((global_rank < 0) || (global_rank >= device_num)) {
    MS_LOG(ERROR) << "Global rank " << global_rank << " is out of range, the device num is " << device_num;
    return FAILED;
  }
  std::vector<int64_t> stages;
  for (int i = 0; i < split_stage_num; i++) {
    stages.push_back(device_num / split_stage_num);
  }
  if ((split_stage_num > 1) && (parallel_mode != SEMI_AUTO_PARALLEL)) {
    MS_LOG(ERROR) << "To enable pipeline parallel, please set the parallel mode to " << SEMI_AUTO_PARALLEL;
    return FAILED;
  }
  if (!InitDevice(device_num, global_rank, communication_backend, stages)) {
    MS_LOG(ERROR) << "Init device failed";
    return FAILED;
  }
  MS_LOG(INFO) << "The parallel context: dev num: " << device_num << ", global rank: " << global_rank
               << ", backend: " << backend << ", gradients_mean: " << ParallelContext::GetInstance()->gradients_mean()
               << ", gradient_fp32_sync: " << ParallelContext::GetInstance()->gradient_fp32_sync();
  return SUCCESS;
}
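// Propagate operator info onto forward MakeTuple/MakeList cnodes: such a node
// must have exactly one user, and it inherits that user's operator info.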
void HandleForwardMakeTupleAndMakeList(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    if (!AnfNodeIsPrimitive(node, MAKE_TUPLE) && !AnfNodeIsPrimitive(node, MAKE_LIST)) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!cnode->in_forward_flag()) {
      continue;
    }
    FuncGraphManagerPtr manager = cnode->func_graph()->manager();
    MS_EXCEPTION_IF_NULL(manager);
    std::string op_type = AnfNodeIsPrimitive(node, MAKE_TUPLE) ? MAKE_TUPLE : MAKE_LIST;
    auto make_tuple_list_user = manager->node_users()[cnode];
    if (make_tuple_list_user.size() != 1) {
      MS_LOG(EXCEPTION) << "Now the " << op_type << " must have exactly 1 user, but got "
                        << make_tuple_list_user.size();
    }
    CNodePtr make_tuple_list_next_cnode = make_tuple_list_user.pop().first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(make_tuple_list_next_cnode);
    std::string make_tuple_list_user_prim_name = GetPrimName(make_tuple_list_next_cnode);
    if (!IsParallelCareNode(make_tuple_list_next_cnode)) {
      MS_LOG(INFO) << "The " << op_type << "'s user is " << make_tuple_list_user_prim_name
                   << ", no need to set operator info";
      continue;
    }
    if (make_tuple_list_next_cnode->inputs().size() != 2) {
      MS_LOG(EXCEPTION) << "Now the " << op_type << "'s user only supports 1 input, but got "
                        << make_tuple_list_next_cnode->inputs().size() - 1;
    }
    MS_LOG(INFO) << "Set the " << op_type << "'s operator info, and the op name is " << make_tuple_list_user_prim_name;
    OperatorInfoPtr op_info = GetDistributeOperator(make_tuple_list_next_cnode);
    MS_EXCEPTION_IF_NULL(op_info);
    cnode->set_user_data<OperatorInfo>(op_info);
  }
}
RefKeyPair CNodeWithRefKeys(const AnfNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  std::vector<AnfNodePtr> refkeys;
  if (cnode->isa<CNode>()) {
    auto cnode_ptr = cnode->cast<CNodePtr>();
    auto inputs = cnode_ptr->inputs();
    for (auto &one_input : inputs) {
      if (IsValueNode<RefKey>(one_input)) {
        refkeys.push_back(one_input);
      }
    }
    if (!refkeys.empty()) {
      return std::make_pair(cnode, refkeys);
    }
  }
  return {nullptr, refkeys};
}
ParameterUsersInfo FindParameterNodeUsers(const AnfNodePtr &node, bool (*IsCareNode)(const CNodePtr &)) {
  // In this case, node is a Parameter
  ParameterUsersInfo parameter_user_info;
  MS_EXCEPTION_IF_NULL(node->func_graph());
  MS_EXCEPTION_IF_NULL(node->func_graph()->manager());
  auto candidate_set = node->func_graph()->manager()->node_users()[node];
  for (auto &candidate : candidate_set) {
    auto candidate_node = candidate.first;
    if (IsPrimitiveCNode(candidate_node, prim::kPrimLoad)) {
      if (candidate.second != 1) {
        continue;
      }
      auto load_node_users = node->func_graph()->manager()->node_users()[candidate_node];
      for (auto &node_user : load_node_users) {
        auto cnode = node_user.first->cast<CNodePtr>();
        if (cnode == nullptr || !cnode->has_user_data<OperatorInfo>() || IsSomePrimitive(cnode, RECEIVE)) {
          continue;
        }
        (void)parameter_user_info.second.second.insert(node_user);
      }
    } else {
      auto c = candidate_node->cast<CNodePtr>();
      if (c == nullptr || !c->has_user_data<OperatorInfo>() || IsSomePrimitive(c, RECEIVE)) {
        continue;
      }
      (void)parameter_user_info.second.second.insert(candidate);
    }
  }
  parameter_user_info.first = node->cast<ParameterPtr>()->name();
  parameter_user_info.second.first = node;
  return parameter_user_info;
}
ParameterUsersInfo FindRefKeyNodeUsers(const RefKeyPair &ref_key_pair, bool (*IsCareNode)(const CNodePtr &)) {
  // Dealing with the RefKey case
  ParameterUsersInfo parameter_user_info;
  auto refkeys = ref_key_pair.second;
  auto cnode = ref_key_pair.first;
  auto cnode_ptr = cnode->cast<CNodePtr>();
  if ((cnode_ptr == nullptr) || !IsValueNode<Primitive>(cnode_ptr->input(0)) || !IsCareNode(cnode_ptr)) {
    return parameter_user_info;
  }
  if (refkeys.size() > 1) {
    MS_LOG(EXCEPTION) << "CNode: " << cnode->fullname_with_scope() << "'s inputs have more than 1 RefKeys";
  }
  MS_EXCEPTION_IF_NULL(cnode->func_graph());
  auto cnode_func_graph = cnode->func_graph();
  MS_EXCEPTION_IF_NULL(cnode->func_graph()->manager());
  // Find the RefKey being used
  auto candidate_set_by_refkey = cnode_func_graph->manager()->node_users()[refkeys[0]];
  for (auto &candidate : candidate_set_by_refkey) {
    auto candidate_node = candidate.first;
    auto c = candidate_node->cast<CNodePtr>();
    if ((c == nullptr) || !IsValueNode<Primitive>(c->input(0)) || !IsCareNode(c)) {
      continue;
    }
    parameter_user_info.second.second.add(candidate);
  }
  // Find the corresponding Parameter being used
  std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(refkeys[0], cnode_func_graph);
  if (parameters.size() != 1) {
    MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
  }
  parameter_user_info.first = parameters[0]->cast<ParameterPtr>()->name();
  parameter_user_info.second.first = parameters[0];
  auto candidate_set_by_para = cnode_func_graph->manager()->node_users()[parameters[0]];
  for (auto &candidate : candidate_set_by_para) {
    auto candidate_node = candidate.first;
    auto c = candidate_node->cast<CNodePtr>();
    if ((c == nullptr) || !IsValueNode<Primitive>(c->input(0)) || !IsCareNode(c)) {
      continue;
    }
    (void)parameter_user_info.second.second.insert(candidate);
  }
  return parameter_user_info;
}
ParameterUsersInfo FindParameterUsers(const AnfNodePtr &node, bool (*IsCareNode)(const CNodePtr &)) {
  ParameterUsersInfo parameter_users_info;
  auto cnode_with_refkeys = CNodeWithRefKeys(node);
  if (cnode_with_refkeys.first != nullptr) {
    // the node is a ref key node
    return FindRefKeyNodeUsers(cnode_with_refkeys, IsCareNode);
  } else if (node->isa<Parameter>()) {
    // the node is a parameter node
    return FindParameterNodeUsers(node, IsCareNode);
  }
  return parameter_users_info;
}
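// Return the slice shape of a parameter as seen by one of its users, looked up
// in the user's inputs_tensor_info (input index 0 is the primitive, so the
// tensor info index is user_input_index - 1).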
Shape ParameterSliceShape(const std::pair<AnfNodePtr, int64_t> &param_info) {
  auto user_cnode = param_info.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(user_cnode);
  auto user_input_index = param_info.second;
  OperatorInfoPtr op_info = user_cnode->user_data<OperatorInfo>();
  MS_EXCEPTION_IF_NULL(op_info);
  size_t input_tensor_info_size = op_info->inputs_tensor_info().size();
  if (SizeToLong(input_tensor_info_size) <= user_input_index - 1) {
    MS_LOG(EXCEPTION) << op_info->name() << ": the size of inputs tensor info is " << input_tensor_info_size
                      << ", but the index is " << user_input_index - 1;
  }
  TensorInfo tensor_info = op_info->inputs_tensor_info()[user_input_index - 1];
  MS_LOG(DEBUG) << "The op name is " << op_info->name() << ", the parameter index is " << user_input_index - 1
                << ", the slice shape is " << ShapeToString(tensor_info.slice_shape()) << ", the origin shape is "
                << ShapeToString(tensor_info.shape());
  return tensor_info.slice_shape();
}
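// When a parameter (or the parameter behind a RefKey) has multiple users,
// verify that every user slices it to the same shape; inconsistent split
// strategies across users are rejected.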
void CheckParameterSplit(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    ParameterUsersInfo parameter_users_info = FindParameterUsers(node, IsParallelCareNode);
    auto users_set = parameter_users_info.second.second;
    if (users_set.size() <= 1) {
      continue;
    }
    auto parameter_name = parameter_users_info.first;
    MS_LOG(INFO) << "The parameter: " << parameter_name << " has " << users_set.size() << " users";
    auto first_user = users_set.pop();
    Shape first_user_slice_shape = ParameterSliceShape(first_user);
    for (auto &user : users_set) {
      Shape user_slice_shape = ParameterSliceShape(user);
      if (first_user_slice_shape != user_slice_shape) {
        MS_LOG(EXCEPTION) << "The parameter: " << parameter_name
                          << " has multiple users, but the split strategies are different";
      }
    }
  }
}
bool CreateGroupsByCkptFile(const std::string &file) {
  GroupInfoMap group_info_map;
  if (StrategyCheckpoint::GetInstance().LoadGroupInfo(file, &group_info_map) != SUCCESS) {
    return false;
  }
  if (CreateGroups(group_info_map) != SUCCESS) {
    return false;
  }
  MS_LOG(INFO) << "Create groups by checkpoint file success";
  return true;
}
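// Check whether a parameter is really used: a use through a sub-graph call or
// a J node is followed recursively into the sub-graph's own parameter.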
bool IsUsedParameter(const FuncGraphPtr &graph, const AnfNodePtr &parameter) {
  MS_EXCEPTION_IF_NULL(graph);
  MS_EXCEPTION_IF_NULL(parameter);
  auto manager = graph->manager();
  auto node_users = manager->node_users()[parameter];
  if (node_users.empty()) {
    return false;
  }
  for (auto node_user : node_users) {
    auto use_node = node_user.first->cast<CNodePtr>();
    if (IsValueNode<FuncGraph>(use_node->input(0))) {
      auto graph_sub = GetValueNode<FuncGraphPtr>(use_node->input(0));
      auto parameters = graph_sub->parameters();
      auto parameter_sub = parameters[node_user.second - 1];
      return IsUsedParameter(graph_sub, parameter_sub);
    }
    if (use_node->input(0)->isa<CNode>()) {
      auto cnode = use_node->input(0)->cast<CNodePtr>();
      if (!IsSomePrimitive(cnode, J) || !IsValueNode<FuncGraph>(cnode->input(1))) {
        return true;
      }
      auto graph_sub = GetValueNode<FuncGraphPtr>(cnode->input(1));
      auto parameters = graph_sub->parameters();
      auto parameter_sub = parameters[node_user.second - 1];
      return IsUsedParameter(graph_sub, parameter_sub);
    }
    return true;
  }
  return true;
}
static void HandleNoUsedParameter(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  bool full_batch = ParallelContext::GetInstance()->full_batch();
  if (full_batch) {
    return;
  }
  auto dev_num = g_device_manager->stage_device_num();
  auto parameters = root->parameters();
  for (auto &parameter : parameters) {
    if (IsUsedParameter(root, parameter)) {
      continue;
    }
    auto parameter_shape = GetNodeShape(parameter);
    if (parameter_shape.empty()) {
      continue;
    }
    Shape slice_shape = parameter_shape[0];
    if (slice_shape.empty()) {
      continue;
    }
    slice_shape[0] = slice_shape[0] / dev_num;
    auto slice_shape_ptr = std::make_shared<abstract::Shape>(slice_shape);
    auto abstract = parameter->abstract();
    MS_EXCEPTION_IF_NULL(abstract);
    auto abstract_cloned = abstract->Clone();
    MS_EXCEPTION_IF_NULL(abstract_cloned);
    abstract_cloned->set_shape(slice_shape_ptr);
    parameter->set_abstract(abstract_cloned);
  }
}
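// Entry point of the step_parallel pass: skip graphs that are not in
// (semi_)auto_parallel mode or have already been processed, otherwise extract
// strategies, insert the parallel operators, and save the checkpoints.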
bool StepParallel(const FuncGraphPtr &root, const opt::OptimizerPtr &optimizer) {
#if (ENABLE_CPU && (ENABLE_D || ENABLE_GPU))
  if (ps::Util::IsRoleOfPServer() || ps::Util::IsRoleOfScheduler()) {
    return false;
  }
#endif
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(optimizer);
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  std::string parallel_mode = ParallelContext::GetInstance()->parallel_mode();
  // assume no change to the graph
  bool changes = false;
  // control whether to use the model_parallel mode
  if (!root->has_flag(AUTO_PARALLEL) || ((parallel_mode != AUTO_PARALLEL) && (parallel_mode != SEMI_AUTO_PARALLEL)) ||
      (root->has_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY))) {
    if (!root->has_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY)) {
      if (HasStrategy(root)) {
        MS_LOG(INFO) << "Strategies ignored in " << parallel_mode
                     << ", set_strategy() is only valid in [semi_]auto_parallel.";
      }
      root->set_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY, true);
    }
    return changes;
  }
  struct timeval start_time, end_time;
  (void)gettimeofday(&start_time, nullptr);
  MS_LOG(INFO) << "Now entering step parallel";
  DumpGraph(root, std::string(STEP_PARALLEL_BEGIN));
  pipeline::ResourceBasePtr res = optimizer->resource();
  MS_EXCEPTION_IF_NULL(res);
  FuncGraphManagerPtr manager = res->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);
  std::reverse(all_nodes.begin(), all_nodes.end());
  if (parallel_mode != AUTO_PARALLEL) {
    TOTAL_OPS = 0;
    auto pipeline_stages = ParallelContext::GetInstance()->pipeline_stage_split_num();
    if (pipeline_stages <= 1 && ParallelInit() != SUCCESS) {
      MS_LOG(EXCEPTION) << "Parallel init failed";
    }
    // mark the forward cnodes; parallel only cares about these nodes
    MarkForwardCNode(root);
    if (FindCommunicationOp(all_nodes)) {
      MS_LOG(EXCEPTION) << "The graph contains a communication op";
    }
    // extract shape and strategy, set operator_info
    ExtractInformation(all_nodes, root->has_flag(TRAINING));
    ReshapeInit(all_nodes);
  }
  HandleRootReshapeAndSaveStrategy(all_nodes);
  HandleForwardMakeTupleAndMakeList(all_nodes);
  // if an input or parameter has multiple users, check whether its split strategies are consistent.
  CheckParameterSplit(all_nodes);
  // save the strategy as a checkpoint for multi-train
  if (StrategyCheckpoint::GetInstance().SaveCheckPointOn()) {
    CheckpointStrategy(all_nodes);
  }
  HandleSymbolicKeyInstance(root, all_nodes);
  // cover the parallel shape
  CoverSliceShape(root);
  // handle inputs that are not used
  HandleNoUsedParameter(root);
  // set the shape for the optimizer's cloned tensors
  SetClonedTensorShapeForOptimizer(root);
  // ForwardCommunication BackwardCommunication TensorRedistribution
  ParallelCommunication(root, all_nodes, manager);
  auto group_info = g_device_manager->group_info();
  if (StrategyCheckpoint::GetInstance().group_info_save_on() &&
      StrategyCheckpoint::GetInstance().SaveGroupInfo(group_info) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Save group info failed";
  }
  DumpGraph(root, std::string(STEP_PARALLEL_END));
  // step parallel only runs once
  root->set_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY, true);
  res->results()[pipeline::kStepParallelGraph] = root;
  // in auto parallel mode, there is no need to check whether the strategies are set
  root->set_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY, true);
  (void)gettimeofday(&end_time, nullptr);
  uint64_t time = kUSecondInSecond * static_cast<uint64_t>(end_time.tv_sec - start_time.tv_sec);
  time += static_cast<uint64_t>(end_time.tv_usec - start_time.tv_usec);
  MS_LOG(INFO) << "Now leaving step parallel, used time: " << time << " us";
  return changes;
}
// Needed by rec_parser
std::vector<std::string> ExtractInputsTensorName(const CNodePtr &node) {
  std::vector<std::string> name_inputs;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  std::vector<AnfNodePtr> node_inputs{all_inputs.begin() + 1, all_inputs.end()};
  std::string node_id = node->UniqueId();
  name_inputs.push_back(node_id);
  for (auto &input : node_inputs) {
    std::string name = input->UniqueId();
    name_inputs.push_back(name);
  }
  return name_inputs;
}
}  // namespace parallel
}  // namespace mindspore