
step_parallel.cc
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "parallel/step_parallel.h"

#include <inttypes.h>
#include <sys/time.h>

#include <algorithm>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <unordered_map>
#include <utility>

#include "ir/tensor.h"
#include "ir/param_value_py.h"
#include "operator/ops.h"
#include "optimizer/optimizer.h"
#include "parallel/auto_parallel/graph_costmodel.h"
#include "parallel/context.h"
#include "parallel/device_manager.h"
#include "parallel/dynamic_creator.h"
#include "parallel/graph_util/generate_graph.h"
#include "parallel/graph_util/graph_info.h"
#include "parallel/graph_util/node_info.h"
#include "parallel/node_check.h"
#include "parallel/ops_info/matmul_info.h"
#include "parallel/strategy_checkpoint/parallel_strategy_checkpoint.h"
#include "utils/comm_manager.h"
#include "utils/symbolic.h"
#include "pipeline/static_analysis/prim.h"
using mindspore::tensor::Tensor;

namespace mindspore {
namespace parallel {
static const std::set<std::string> COMMUNICATION_OPS = {ALL_REDUCE, ALL_GATHER, ALL_TO_ALL, REDUCE_SCATTER};
static const std::set<std::string> INVALID_LOSS_OPS = {GET_NEXT, VIRTUALLOSS};

// g_RefMap: if input i of CNode B is a RefKey[Parameter C], the map holds one entry
// with key C and value (B, i).
static std::map<AnfNodePtr, std::pair<AnfNodePtr, int>> g_RefMap;
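// If a communication op carries a 'group' attr (a hashed group name), look up the
// human-readable rank list for that group and record it in the op's 'group_ranks' attr.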
void SetCommunicationOpGroupLabel(std::vector<AnfNodePtr> new_node_input) {
  if (new_node_input.empty()) {
    return;
  }

  ValueNodePtr prim_anf_node = new_node_input[0]->cast<ValueNodePtr>();
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
  MS_EXCEPTION_IF_NULL(prim);

  auto attrs = prim->attrs();
  auto iter = attrs.find(GROUP);
  if (iter != attrs.end()) {
    auto value = iter->second;
    MS_EXCEPTION_IF_NULL(value);
    if (value->isa<StringImm>()) {
      std::string hash_name = value->cast<StringImmPtr>()->value();
      MS_EXCEPTION_IF_NULL(g_device_manager);
      std::string rank_list_name = g_device_manager->FindRankListNameByHashName(hash_name);
      (void)prim->AddAttr(GROUP_RANKS, MakeValue(rank_list_name));
    }
  }
}
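// Build the input list for a new CNode that applies 'op' to 'node': the op's primitive
// becomes input 0, 'node' becomes input 1, and any constant operator params are inserted
// at the positions they declare.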
std::vector<AnfNodePtr> CreateInput(const Operator &op, const AnfNodePtr &node, const std::string &instance_name) {
  MS_EXCEPTION_IF_NULL(node);
  OperatorArgs arg_forward = op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_forward.first, op.first, instance_name);
  MS_EXCEPTION_IF_NULL(pyop_instance);
  OperatorParams params = arg_forward.second;

  std::vector<AnfNodePtr> new_node_input = {NewValueNode(pyop_instance), node};
  if (!params.empty()) {
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      MS_EXCEPTION_IF_NULL(val);
      int32_t position = param.second;
      (void)new_node_input.insert(new_node_input.begin() + position, val);
    }
  }

  // if the op has a 'group' attr, set the rank list name for the op
  SetCommunicationOpGroupLabel(new_node_input);
  return new_node_input;
}
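// Insert a new CNode that applies 'op' to 'pre_node' and splice it in as input 'index'
// of 'node', preserving the original node's scope.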
void InsertNode(const Operator &op, const CNodePtr &node, size_t index, const AnfNodePtr &pre_node,
                const FuncGraphPtr &func_graph, const std::string &instance_name) {
  // insert the new node before 'node'
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);

  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);

  std::vector<AnfNodePtr> node_input = CreateInput(op, pre_node, instance_name);
  CNodePtr new_node = func_graph->NewCNode(node_input);
  MS_EXCEPTION_IF_NULL(new_node);
  if (instance_name.find(SPLIT_SENS) == std::string::npos) {
    new_node->set_in_forward_flag(true);  // mark forward flag
  }

  auto new_node_value = node_input[0]->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(new_node_value);
  PrimitivePtr new_node_prim = new_node_value->value()->cast<PrimitivePtr>();
  new_node_prim->set_instance_name(instance_name);
  new_node_prim->set_attr("keep_value_node_input", MakeValue(true));

  new_node->set_scope(scope);
  node_input[0]->set_scope(scope);
  manager->SetEdge(node, SizeToInt(index), new_node);
}
std::string CreateInstanceName(const CNodePtr &node, size_t index) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsValueNode<Primitive>(node->input(0))) {
    MS_LOG(EXCEPTION) << "CreateInstanceName: " << node->ToString() << " doesn't have primitive";
  }
  std::string name_base = node->fullname_with_scope();
  std::string name = name_base + "_" + std::to_string(index);
  std::string instance_name = HashInstanceName(name);
  return instance_name;
}
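// Append the forward communication ops (e.g. AllReduce) that a distributed operator
// requires after 'node'. If the node's user is a TupleGetItem, the ops are inserted
// after that user instead; only a single user is supported in that case.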
void ForwardCommunication(OperatorVector forward_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  // step 1: get the graph manager and distribute_operator
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);

  auto uses_set = manager->node_users()[node];
  CNodePtr node_to_insert = node;
  for (auto &uses_pair : uses_set) {
    auto uses_cnode = uses_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(uses_cnode);
    if (!IsValueNode<Primitive>(uses_cnode->input(0))) {
      break;
    }
    PrimitivePtr value_node_prim = GetValueNode<PrimitivePtr>(uses_cnode->input(0));
    MS_EXCEPTION_IF_NULL(value_node_prim);
    if (value_node_prim->name() == TUPLE_GETITEM) {
      if (uses_set.size() > 1) {
        MS_LOG(EXCEPTION) << "Currently only one output is supported, but got " << uses_set.size();
      }
      node_to_insert = uses_cnode;
    }
  }
  MS_EXCEPTION_IF_NULL(node_to_insert);
  std::reverse(forward_op.begin(), forward_op.end());

  // step 2: traverse op_list and insert nodes
  for (size_t index = 0; index < forward_op.size(); ++index) {
    std::string instance_name_base = FORWARD_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(node, index);
    std::vector<AnfNodePtr> forward_input = CreateInput(forward_op[index], node_to_insert, instance_name);
    CNodePtr forward_node = func_graph->NewCNode(forward_input);  // use NewCNode to create an AnfNode
    MS_EXCEPTION_IF_NULL(forward_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    forward_node->set_scope(scope);
    forward_node->set_in_forward_flag(true);
    forward_input[0]->set_scope(scope);
    (void)manager->Replace(node_to_insert, forward_node);  // use Replace to insert the node
  }
}
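// Replace 'prev' with a MakeTuple that regroups its 'num' outputs, each extracted
// through a TupleGetItem, and return the new MakeTuple node.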
CNodePtr InsertMakeTuple(const AnfNodePtr &prev, uint32_t num, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(prev);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> make_tuple_inputs;
  make_tuple_inputs.push_back(NewValueNode(prim::kPrimMakeTuple));
  for (uint32_t i = 0; i < num; i++) {
    std::vector<AnfNodePtr> tuple_get_item_inputs{NewValueNode(prim::kPrimTupleGetItem), prev,
                                                  CreatInt32Imm(UintToInt(i))};
    auto tuple_get_item = func_graph->NewCNode(tuple_get_item_inputs);
    MS_EXCEPTION_IF_NULL(tuple_get_item);
    make_tuple_inputs.push_back(tuple_get_item);
  }
  auto make_tuple = func_graph->NewCNode(make_tuple_inputs);
  MS_EXCEPTION_IF_NULL(make_tuple);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  (void)manager->Replace(prev, make_tuple);
  return make_tuple;
}
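// Insert the inferred redistribution operators one by one on edge 'pos' of 'node';
// when an operator produces multiple outputs, regroup them with InsertMakeTuple.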
void InsertRedistribution(const RedistributionOpListPtr &redistribution_oplist_ptr, const CNodePtr &node,
                          const FuncGraphPtr &func_graph, int pos, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(pre_node);
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if ((redistribution_oplist_ptr->first).size() != (redistribution_oplist_ptr->second).size()) {
    MS_LOG(EXCEPTION) << "The sizes of OperatorVector and OutPutInfoVector must be the same!";
  }
  for (size_t index = 0; index < (redistribution_oplist_ptr->first).size(); ++index) {
    if (pos >= SizeToInt(node->inputs().size())) {
      MS_LOG(EXCEPTION) << "InsertRedistribution: pos can't be larger than the size of node's inputs";
    }
    // create a new node
    AnfNodePtr target_node = node->input(IntToSize(pos));
    MS_EXCEPTION_IF_NULL(target_node);
    // create the instance_name
    auto op = (redistribution_oplist_ptr->first)[index];
    std::string op_name = (redistribution_oplist_ptr->first)[index].first;
    std::string instance_name_base = REDISTRIBUTION_OP;
    std::string instance_name = instance_name_base + "_" + CreateInstanceName(pre_node, index) + op_name;
    InsertNode(op, node, IntToSize(pos), target_node, func_graph, instance_name);
    if ((redistribution_oplist_ptr->second)[index].first) {
      target_node = node->input(IntToSize(pos));
      MS_EXCEPTION_IF_NULL(target_node);
      (void)InsertMakeTuple(target_node, (redistribution_oplist_ptr->second)[index].second, func_graph);
    }
  }
}
void InsertGetTensorSliceOp(const Operator &op, const CNodePtr &node, const FuncGraphPtr &func_graph, int pos,
                            const std::string &instance_name) {
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: the graph is null, the instance name is " << instance_name;
  }
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  if (pos >= SizeToInt(node->inputs().size())) {
    MS_LOG(EXCEPTION) << "InsertGetTensorSliceOp: pos can't be larger than the size of node's inputs, the instance "
                      << "name is " << instance_name;
  }
  // create a new node
  AnfNodePtr pre_node = node->input(IntToSize(pos));
  MS_EXCEPTION_IF_NULL(pre_node);
  InsertNode(op, node, IntToSize(pos), pre_node, func_graph, instance_name);
}
TensorLayout GetTensorInLayout(const CNodePtr &middle_node, const PrimitivePtr &middle_prim,
                               const OperatorInfoPtr &distribute_operator) {
  TensorInfo tensorinfo_in;
  if (middle_prim->name() == TUPLE_GETITEM) {
    auto value_node = middle_node->input(2)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(value_node);
    size_t index_s = IntToSize(GetValue<int>(value_node->value()));
    if (index_s >= distribute_operator->outputs_tensor_info().size()) {
      MS_LOG(EXCEPTION) << "The index is out of range, index: " << index_s
                        << ", vector size: " << distribute_operator->outputs_tensor_info().size();
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[index_s];
  } else {
    if (distribute_operator->outputs_tensor_info().empty()) {
      MS_LOG(EXCEPTION) << "The outputs tensor info is empty";
    }
    tensorinfo_in = distribute_operator->outputs_tensor_info()[0];
  }
  return tensorinfo_in.tensor_layout();
}
OperatorInfoPtr GetDistributeOperator(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  if (!IsParallelCareNode(node)) {
    return nullptr;
  }
  OperatorInfoPtr distribute_operator = node->operator_info();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "GetDistributeOperator: distribute_operator is nullptr";
  }
  return distribute_operator;
}
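// Redistribute the tensor flowing from 'middle_node' into input 'index' of the next
// parallel-care node: compare the producer's output layout with the consumer's expected
// input layout, infer the operator list that converts between them, and insert it.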
void Redistribution(const std::pair<AnfNodePtr, int> &node_pair, const OperatorInfoPtr &distribute_operator,
                    const CNodePtr &middle_node, int index, TensorRedistribution tensor_redistribution,
                    const CNodePtr &pre_node) {
  FuncGraphPtr func_graph = middle_node->func_graph();
  if (func_graph == nullptr) {
    MS_LOG(EXCEPTION) << "Redistribution: get graph failed";
  }
  CNodePtr next_node = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(next_node);
  auto middle_value = middle_node->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(middle_value);
  PrimitivePtr middle_prim = middle_value->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(middle_prim);
  OperatorInfoPtr next_distribute_operator = GetDistributeOperator(next_node);
  if (next_distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << next_node->ToString() << " GetDistributeOperator failed";
  }
  RankList dev_list = distribute_operator->global_device_list();
  std::string next_prim_name = GetValueNode<PrimitivePtr>(next_node->input(0))->name();
  MS_LOG(DEBUG) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim " << next_prim_name;
  MS_LOG(DEBUG) << "Redistribution: middle_node " << middle_node->ToString() << " next_node " << next_node->ToString();

  // extract the tensor layouts in and out
  if (distribute_operator->outputs_tensor_info().empty()) {
    MS_LOG(EXCEPTION) << "Failure: pre_node's tensorinfo_in is empty";
  }
  if (IntToSize(index - 1) >= next_distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, the index is " << index - 1 << ", the vector size is "
                      << next_distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_out = next_distribute_operator->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  TensorLayout tensorlayout_in = GetTensorInLayout(middle_node, middle_prim, distribute_operator);
  if (tensor_redistribution.Init(tensorlayout_in, tensorlayout_out, dev_list) == FAILED) {
    MS_LOG(ERROR) << "Redistribution: middle_prim " << middle_prim->name() << " next_prim : " << next_prim_name;
    MS_LOG(ERROR) << "Redistribution: middle_node " << middle_node->ToString() << " next_node "
                  << next_node->ToString();
    DumpGraph(func_graph, "redistribution_error");
    MS_LOG(EXCEPTION) << "Failure: tensor_redistribution init failed";
  }
  RedistributionOpListPtr redistribution_oplist_ptr = tensor_redistribution.InferTensorRedistributionOperatorList();
  if (redistribution_oplist_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: InferTensorRedistribution failed";
  }
  MS_LOG(DEBUG) << "Redistribution size " << redistribution_oplist_ptr->first.size();
  if (!redistribution_oplist_ptr->first.empty()) {
    // insert the nodes before the next node
    InsertRedistribution(redistribution_oplist_ptr, next_node, func_graph, node_pair.second, pre_node);
  }
}
bool StrategyFound(std::unordered_map<std::string, ValuePtr> attrs) {
  auto iter = attrs.find(STRATEGY);
  return !((iter == attrs.end()) || (iter->second->type_name() == NONE));
}

bool HasStrategy(const FuncGraphPtr &root) {
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);

  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    auto attrs = prim->attrs();
    if (StrategyFound(attrs)) {
      return true;
    }
  }

  return false;
}

bool IsCommunicationOp(const PrimitivePtr &prim) {
  MS_EXCEPTION_IF_NULL(prim);
  return (COMMUNICATION_OPS.find(prim->name()) != COMMUNICATION_OPS.end());
}
bool FindCommunicationOp(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_value_node = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_value_node);
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_value_node);
    MS_EXCEPTION_IF_NULL(prim);

    if (IsCommunicationOp(prim) && cnode->in_forward_flag()) {
      MS_EXCEPTION_IF_NULL(prim_value_node->scope());
      MS_LOG(INFO) << "The graph contains the communication op: " << prim->name() << ", scope name is "
                   << prim_value_node->scope()->name();
      return true;
    }
  }
  return false;
}
bool IsParallelCareNode(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  ValueNodePtr prim_node = cnode->input(0)->cast<ValueNodePtr>();
  if (prim_node == nullptr) {
    return false;
  }
  PrimitivePtr prim = prim_node->value()->cast<PrimitivePtr>();
  if (prim == nullptr) {
    return false;
  }
  if (IsInBlackList(prim)) {
    MS_LOG(INFO) << "Parallel doesn't care about node: " << prim->name();
    return false;
  }
  // get_next is not in the forward graph, so we need to mark get_next as a forward node
  if (prim->name() == GET_NEXT) {
    return true;
  }
  if ((prim->name() == CAST) && (cnode->operator_info() == nullptr)) {
    return false;
  }

  return cnode->in_forward_flag();
}
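// Walk the users of 'node' and trigger Redistribution for every parallel-care consumer;
// recurse through the other users so redistribution reaches consumers behind
// intermediate nodes such as TupleGetItem.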
void StepRedistribution(const CNodePtr &node, const OperatorInfoPtr &distribute_operator, const CNodePtr &insert_node,
                        const TensorRedistribution &tensor_redistribution, const CNodePtr &pre_node) {
  MS_EXCEPTION_IF_NULL(node->func_graph());
  FuncGraphManagerPtr manager = node->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  CNodePtr insert_node_new;
  if (IsValueNode<Primitive>(node->input(0))) {
    auto current_value = node->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(current_value);
    PrimitivePtr current_prim = current_value->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(current_prim);
    insert_node_new = ((current_prim->name() == TUPLE_GETITEM) ? node : insert_node);
  } else {
    insert_node_new = insert_node;
  }
  MS_EXCEPTION_IF_NULL(insert_node_new);
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode);
    if (!IsValueNode<Primitive>(use_cnode->input(0))) {
      StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
    } else {
      ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
      MS_EXCEPTION_IF_NULL(prim_anf_node);
      PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
      MS_EXCEPTION_IF_NULL(node_prim);
      if (node_prim->name() == DEPEND && node_pair.second != 1) {
        continue;
      }
      if (IsParallelCareNode(use_cnode) && (use_cnode->operator_info() != nullptr)) {
        Redistribution(node_pair, distribute_operator, insert_node_new, node_pair.second, tensor_redistribution,
                       pre_node);
      } else {
        StepRedistribution(use_cnode, distribute_operator, insert_node_new, tensor_redistribution, pre_node);
      }
    }
  }
}
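// Split the tensor fed into input 'index' of 'next_node' according to that operator's
// expected input layout by inserting a _GetTensorSlice op; scalar and [1] shapes are
// left untouched.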
void SplitTensor(const AnfNodePtr &node, const CNodePtr &next_node, int index) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(next_node);
  OperatorInfoPtr op_info = next_node->operator_info();
  MS_EXCEPTION_IF_NULL(op_info);

  // If the shape of the tensor is [] or [1], there is no need to split it.
  Shapes shapes = GetNodeShape(node);
  if (shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Split tensor for " << op_info->name()
                      << ": GetNodeShape for tensor_node, output size is not 1";
  }
  Shape shape = shapes[0];
  std::string shape_str = ShapeToString(shape);
  if (shape.empty() || ((shape.size() == 1) && (shape[0] == 1))) {
    MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape is " << shape_str
                 << ", no need to split it.";
    return;
  }
  MS_LOG(INFO) << "Split tensor for " << op_info->name() << ": The shape of tensor is " << shape_str;

  // extract the tensor layout
  if (IntToSize(index - 1) >= op_info->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << index - 1 << ", vector size is "
                      << op_info->inputs_tensor_info().size();
  }
  TensorInfo tensor_info = op_info->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensor_layout = tensor_info.tensor_layout();

  // use the _GetTensorSlice operator to split the tensor
  FuncGraphPtr func_graph = next_node->func_graph();  // only a cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(tensor_layout);
  InsertGetTensorSliceOp(op, next_node, func_graph, index, SPLIT_TENSOR);
  if (!op_info->sub_ops().empty()) {
    auto sub_ops = op_info->sub_ops();
    for (size_t i = 0; i < sub_ops.size(); i++) {
      if (!sub_ops.at(i).empty()) {
        InsertGetTensorSliceOp(sub_ops.at(i).at(0), next_node, func_graph, index, SUB);
      }
    }
  }
}
void StepSplitTensor(const AnfNodePtr &node, const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr use_cnode = node_pair.first->cast<CNodePtr>();
    if (use_cnode == nullptr || !IsValueNode<Primitive>(use_cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr use_cnode_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(use_cnode_prim);
    if (use_cnode_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_cnode)) {
      SplitTensor(node, use_cnode, node_pair.second);
    }
  }
}
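// Build the input list for the node that replaces 'node' with 'replace_op': reuse the
// original first input (or the first two for GatherV2 and SparseGatherV2) and insert
// the replace op's constant params at their declared positions.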
std::vector<AnfNodePtr> ReplaceOpInput(const Operator &replace_op, const std::string &instance_name,
                                       const CNodePtr &node) {
  OperatorArgs arg_replace_op = replace_op.second;
  ValuePtr pyop_instance = CreatOpInstance(arg_replace_op.first, replace_op.first, instance_name);
  if (pyop_instance == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: " << replace_op.first << " CreatOpInstance failed";
  }
  OperatorParams params = arg_replace_op.second;
  if (node->inputs().size() < 2) {
    // the GetNext operator does not have inputs
    if (node->inputs().size() == 1) {
      return {NewValueNode(pyop_instance)};
    }
    MS_LOG(EXCEPTION) << "Failure: " << node->ToString() << " size is smaller than 2";
  }
  std::vector<AnfNodePtr> replace_input = {NewValueNode(pyop_instance), node->input(1)};
  auto prim = GetValueNode<PrimitivePtr>(node->input(0));
  if (prim->name() == GATHERV2 || prim->name() == SPARSE_GATHERV2) {
    replace_input = {NewValueNode(pyop_instance), node->input(1), node->input(2)};
  }
  if (!params.empty()) {
    Param param_first = *(params.begin());
    int32_t first_position = param_first.second;
    if (first_position == 1) {
      replace_input.pop_back();
    }
    for (auto &param : params) {
      AnfNodePtr val = NewValueNode(param.first.second);
      if (val == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: val is nullptr";
      }
      int32_t position = param.second;
      (void)replace_input.insert(replace_input.begin() + position, val);
    }
  }

  return replace_input;
}
void ReplaceOneOp(const Operator &replace_op, const CNodePtr &node) {
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  std::string instance_name = CreateInstanceName(node, 0);
  std::vector<AnfNodePtr> replace_input;
  replace_input = ReplaceOpInput(replace_op, instance_name, node);
  CNodePtr replace_node = func_graph->NewCNode(replace_input);
  MS_EXCEPTION_IF_NULL(replace_node);
  ScopePtr scope = node->scope();
  MS_EXCEPTION_IF_NULL(scope);
  replace_node->set_scope(scope);
  replace_node->set_in_forward_flag(true);
  replace_input[0]->set_scope(scope);
  (void)manager->Replace(node, replace_node);
}
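// Replace 'node' with the operator list in 'replace_op'; the last replacement inherits
// the original operator_info, and steps flagged in replace_op_info get their outputs
// regrouped with InsertMakeTuple.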
void StepReplaceOp(OperatorVector replace_op, const CNodePtr &node) {
  // step 1: get the graph manager and distribute_operator
  OperatorInfoPtr distribute_operator = node->operator_info();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since distribute_operator is nullptr";
  }
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }

  // step 2: traverse op_list and insert nodes
  std::reverse(replace_op.begin(), replace_op.end());
  auto replace_op_info = distribute_operator->replace_op_info();
  std::reverse(replace_op_info.begin(), replace_op_info.end());
  if (!replace_op_info.empty() && replace_op_info.size() != replace_op.size()) {
    MS_LOG(EXCEPTION) << "replace_op_info is not empty and its size is not equal to replace_op!";
  }
  bool replace_op_info_flag = !replace_op_info.empty();
  for (size_t index = 0; index < replace_op.size(); ++index) {
    std::string instance_name = CreateInstanceName(node, index);
    std::vector<AnfNodePtr> replace_input;
    if (index != replace_op.size() - 1) {
      replace_input = CreateInput(replace_op[index], node, instance_name);
    } else {
      replace_input = ReplaceOpInput(replace_op[index], instance_name, node);
    }
    CNodePtr replace_node = func_graph->NewCNode(replace_input);
    MS_EXCEPTION_IF_NULL(replace_node);
    ScopePtr scope = node->scope();
    MS_EXCEPTION_IF_NULL(scope);
    replace_node->set_scope(scope);
    if (index == replace_op.size() - 1) {
      (void)replace_node->set_operator_info(node->operator_info());
    }
    replace_node->set_in_forward_flag(true);
    replace_input[0]->set_scope(scope);
    if (replace_op_info_flag && replace_op_info[index].first) {
      auto new_cnode = InsertMakeTuple(replace_node, replace_op_info[index].second, func_graph);
      (void)manager->Replace(node, new_cnode);  // use Replace to insert the node
    } else {
      (void)manager->Replace(node, replace_node);  // use Replace to insert the node
    }
  }
  MS_LOG(INFO) << "Insert ReplaceOp success for " << distribute_operator->name();
}
bool IsSomePrimitive(const CNodePtr &cnode, const std::string &name) {
  ValueNodePtr anf_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(anf_node);
  PrimitivePtr prim = anf_node->value()->cast<PrimitivePtr>();
  return (prim->name() == name);
}
void StepReplaceGraph(const ReplaceGraphPtr &replace_graph, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(replace_graph);
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(replace_graph->second);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  if (manager == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: AddNode error since manager is nullptr";
  }
  for (auto &replace_input : replace_graph->first) {
    auto pre_node = node->input(IntToSize(replace_input.second));
    manager->SetEdge(replace_input.first, 1, pre_node);
  }
  // "(void)manager->Replace(replace_graph->first, pre_node);" can not be called
  auto replace_output = replace_graph->second;
  MS_EXCEPTION_IF_NULL(replace_output);
  (void)manager->Replace(node, replace_output);
}
int32_t GetTupleGetItemIndex(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  if (cnode->inputs().size() != 3) {
    MS_LOG(EXCEPTION) << cnode->ToString() << " size( " << cnode->inputs().size() << " ) is not 3";
  }

  if (!cnode->input(2)->isa<ValueNode>()) {
    MS_LOG(EXCEPTION) << "The index of tuple getitem is not a value node";
  }

  ValuePtr tuple_index_value = GetValueNode(cnode->input(2));
  MS_EXCEPTION_IF_NULL(tuple_index_value);
  if (!tuple_index_value->isa<Int32Imm>()) {
    MS_LOG(EXCEPTION) << "The index of tuple getitem is not int32";
  }
  return tuple_index_value->cast<Int32ImmPtr>()->value();
}
// Judge whether the node is a loss, and if there are multiple outputs,
// get which output is a grad according to the tuple getitem.
// Currently, it is not supported that the sens is a tuple.
LossNodeInfo GetLossNodeInfo(const AnfNodePtr &loss_node) {
  MS_EXCEPTION_IF_NULL(loss_node);
  FuncGraphPtr sub_graph = loss_node->func_graph();
  MS_EXCEPTION_IF_NULL(sub_graph);
  CNodePtr return_node = sub_graph->get_return();
  MS_EXCEPTION_IF_NULL(return_node);
  if (return_node->inputs().size() < 2) {
    MS_LOG(EXCEPTION) << "Failure: " << return_node->ToString() << " size is smaller than 2";
  }
  AnfNodePtr pre_node = return_node->input(1);
  MS_EXCEPTION_IF_NULL(pre_node);

  LossNodeInfo node_info;

  // return -> cast
  auto pre_cnode = pre_node->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(pre_cnode);
  auto pre_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  if (pre_prim->name() == CAST && pre_cnode->operator_info() == nullptr) {
    pre_node = pre_cnode->input(1);
  }

  // return -> loss
  if (pre_node == loss_node) {
    node_info.has_tuple_getitem = false;
    node_info.dout_index = 0;
    return node_info;
  }

  // return -> tuple_getitem -> loss
  auto cnode = pre_node->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  auto current_value = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(current_value);
  PrimitivePtr current_prim = current_value->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(current_prim);
  // the size of a common cnode is larger than 1
  if (cnode->inputs().size() < 2) {
    MS_LOG(EXCEPTION) << cnode->ToString() << " size( " << cnode->inputs().size() << " ) is smaller than 2";
  }
  if ((current_prim->name() == TUPLE_GETITEM) && (cnode->input(1) == loss_node)) {
    // the size of a tuple_getitem cnode is 3
    auto tuple_index = GetTupleGetItemIndex(cnode);
    node_info.has_tuple_getitem = true;
    node_info.dout_index = tuple_index;
    return node_info;
  }

  MS_LOG(EXCEPTION) << "Invalid loss";
}
void InsertVirtualDivOp(const VirtualDivOp &virtual_div_op, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);

  for (size_t index = 1; index < node_size; ++index) {
    AnfNodePtr input = node->input(index);
    MS_EXCEPTION_IF_NULL(input);
    if (!input->isa<CNode>() && !input->isa<Parameter>()) {  // if it is not a tensor, continue
      MS_LOG(INFO) << "insert div op: input index " << index << " is not a tensor, skip";
      continue;
    }

    for (size_t pos = 0; pos < virtual_div_op.size(); ++pos) {
      std::string instance_name = CreateInstanceName(node, pos);
      InsertNode(virtual_div_op[pos], node, index, node->input(index), func_graph, instance_name);
    }
    MS_LOG(INFO) << "insert div op for input index " << index << " of node";
  }
}
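// Trace back through 'node' and its inputs to find the Parameter (or RefKey to a
// Parameter) it originates from; the bool in the returned pair marks the RefKey case.
// The search stops at parallel-care nodes, which produce computed values rather than
// raw parameters.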
std::pair<AnfNodePtr, bool> FindParameter(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  if (!node->isa<Parameter>() && !node->isa<CNode>() && !node->isa<ValueNode>()) {
    return std::make_pair(nullptr, false);
  } else if (node->isa<Parameter>()) {
    return std::make_pair(node, false);
  } else if (node->isa<ValueNode>()) {
    if (IsValueNode<RefKey>(node)) {
      std::vector<AnfNodePtr> param_v = FindParameterByRefKeyNode(node, func_graph);
      if (param_v.size() != 1) {
        MS_LOG(EXCEPTION) << "FindParameterByRefKeyNode failed, the returned vector size must be 1, real is "
                          << param_v.size();
      }
      return std::make_pair(node, true);
    }
    return std::make_pair(nullptr, false);
  } else {
    CNodePtr cnode = node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      for (size_t index = 0; index < cnode->inputs().size(); ++index) {
        if (!FindParameter(cnode->input(index), func_graph).first) {
          continue;
        }
        return FindParameter(cnode->input(index), func_graph);
      }
    } else {
      if (IsParallelCareNode(cnode)) {
        return std::make_pair(nullptr, false);
      } else {
        ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
        MS_EXCEPTION_IF_NULL(prim_anf_node);
        for (size_t index = 0; index < cnode->inputs().size(); ++index) {
          PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
          MS_EXCEPTION_IF_NULL(prim);
          if (prim->name() == DEPEND && index != 1) {
            continue;
          }
          if (!FindParameter(cnode->input(index), func_graph).first) {
            continue;
          }
          return FindParameter(cnode->input(index), func_graph);
        }
      }
    }
  }
  return std::make_pair(nullptr, false);
}
std::pair<bool, CNodePtr> FindCNode(const AnfNodePtr &anode, const std::string &name, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(anode);
  MS_EXCEPTION_IF_NULL(anode->func_graph());
  FuncGraphManagerPtr manager = anode->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[anode];
  bool result = false;
  CNodePtr cnode_return = nullptr;
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if (node_prim->name() == name && node_pair.second == 1) {
      if (use_apply->func_graph() == func_graph) {
        result = true;
        cnode_return = use_apply;
        MS_LOG(INFO) << "Find Primitive " << name << " in the same func_graph";
        continue;
      }
      MS_LOG(INFO) << "Find Primitive " << name << " in a different func_graph";
    }
  }
  return std::make_pair(result, cnode_return);
}
bool IsCastBeforMirror(const CNodePtr &node, size_t index) {
  // return true only if cast_before_mirror is enabled, the previous node is a Cast, and its type is not float32
  if (!ParallelContext::GetInstance()->cast_before_mirror()) {
    return false;
  }
  auto pre_node = node->input(index);
  MS_EXCEPTION_IF_NULL(pre_node);
  auto cnode = pre_node->cast<CNodePtr>();
  if (cnode == nullptr || !IsValueNode<Primitive>(cnode->input(0))) {
    return false;
  }
  auto pre_value_node = cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(pre_value_node);
  auto pre_prim = pre_value_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(pre_prim);
  if (pre_prim->name() != CAST) {
    return false;
  }
  auto node_type = pre_node->Type();
  MS_EXCEPTION_IF_NULL(node_type);
  if (!node_type->isa<mindspore::TensorType>()) {
    MS_LOG(EXCEPTION) << "Unknown type.";
  }
  auto input_element_type = node_type->cast<mindspore::TensorTypePtr>()->element();
  MS_EXCEPTION_IF_NULL(input_element_type);
  auto type_id = input_element_type->type_id();

  return (type_id != kNumberTypeFloat32);
}
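// Insert the mirror (gradient-synchronization) ops for every parameter input of 'node'.
// If a MirrorOp for the same parameter already exists in the graph it is reused, and
// when IsCastBeforMirror holds, the mirror is placed before the Cast.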
void InsertMirrorOps(const MirrorOps &mirror_ops, const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  size_t node_size = node->inputs().size();
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);

  if (mirror_ops.size() != node_size - 1) {
    MS_LOG(EXCEPTION) << "Failure: the size of mirror_ops is wrong! mirror_ops size is " << mirror_ops.size()
                      << ", node_size is " << node_size;
  }
  for (size_t index = 1; index < node_size; ++index) {
    OperatorVector backward_op = mirror_ops[index - 1];
    if (backward_op.empty()) {
      continue;
    }
    std::pair<AnfNodePtr, bool> param_node_pair = FindParameter(node->input(index), func_graph);
    if (!param_node_pair.first) {
      continue;
    }
    // not a RefKey
    if (!param_node_pair.second) {
      auto next_cnode = FindCNode(param_node_pair.first, MIRROR_OPERATOR, func_graph);
      // if there is already a MirrorOp in the same graph, use the MirrorOp CNode as an input instead
      if (next_cnode.first) {
        MS_EXCEPTION_IF_NULL(next_cnode.second);
        manager->SetEdge(node, SizeToInt(index), next_cnode.second);
        continue;
      }
    }
    // if the parameter found is a RefKey, or no MirrorOp is found in the same graph, insert a new MirrorOp
    // there is only one MirrorOp in backward_op
    if (backward_op.size() != 1) {
      MS_LOG(EXCEPTION) << "backward_op size must be 1, real is " << backward_op.size();
    }
    std::string instance_name = MIRROR_OP;
    if (IsCastBeforMirror(node, index)) {
      for (auto &op : backward_op) {
        // insert a new node before the current node
        CNodePtr cnode = node->input(index)->cast<CNodePtr>();
        MS_EXCEPTION_IF_NULL(cnode);
        AnfNodePtr pre_node = cnode->input(1);
        InsertNode(op, cnode, size_t(1), pre_node, func_graph, instance_name);
      }
    } else {
      for (auto &op : backward_op) {
        AnfNodePtr pre_node = node->input(index);
        InsertNode(op, node, index, pre_node, func_graph, instance_name);
      }
    }
  }
}
void BackwardCommunication(const OperatorInfoPtr &distribute_operator, const CNodePtr &node,
                           const std::vector<std::pair<CNodePtr, CNodePtr>> &sens_loss_pairs) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(node);

  bool is_loss_cnode =
    std::any_of(sens_loss_pairs.begin(), sens_loss_pairs.end(),
                [node](const std::pair<CNodePtr, CNodePtr> &element) { return element.second == node; });

  MirrorOps mirror_ops = distribute_operator->mirror_ops();
  VirtualDivOp virtual_div_op = distribute_operator->virtual_div_op();
  // insert mirror op
  if (!mirror_ops.empty()) {
    MS_LOG(INFO) << "insert mirror op for " << distribute_operator->name();
    InsertMirrorOps(mirror_ops, node);
  }
  // insert virtual div op
  if (!virtual_div_op.empty() && is_loss_cnode) {
    MS_LOG(INFO) << "insert virtual div op for " << distribute_operator->name();
    InsertVirtualDivOp(virtual_div_op, node);
  }
}
std::string GetDisOpName(const std::string &prim_name) {
  std::string op_name = prim_name;
  if (!prim_name.empty() && (prim_name[0] == '_')) {
    op_name = prim_name.substr(1);
  }
  return op_name + "Info";
}
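// Create the OperatorInfo instance that matches a primitive name via the dynamic
// creator; GatherV2 picks between the parallel ("PInfo") and data-parallel ("Info")
// variants based on its 'data_parallel' attr, and each instance gets a unique
// numbered name.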
OperatorInfoPtr OperatorInstanceByName(const std::string &name, const PrimitiveAttrs &attrs,
                                       const std::vector<Shapes> &shape_list) {
  if (shape_list.size() != 2) {
    MS_LOG(ERROR) << "The size of shape list is not 2";
    return nullptr;
  }
  if (name.length() == 0) {
    MS_LOG(EXCEPTION) << "Length of name is zero!";
  }
  std::string distribute_opname = GetDisOpName(name);
  if (name == GATHERV2) {
    distribute_opname = name + "PInfo";
    auto data_parallel_iter = attrs.find(DATA_PARALLEL);
    if (data_parallel_iter != attrs.end()) {
      MS_EXCEPTION_IF_NULL(data_parallel_iter->second);
      if (!data_parallel_iter->second->isa<BoolImm>()) {
        MS_LOG(EXCEPTION) << ": data_parallel flag's type is not a bool.";
      }
      bool data_parallel = data_parallel_iter->second->cast<BoolImmPtr>()->value();
      if (data_parallel) {
        distribute_opname = name + "Info";
      }
    }
  }
  OperatorInfoPtr operator_ =
    (OperatorInfoPtr)DynCreator::Instance().Creat(distribute_opname, shape_list[0], shape_list[1], attrs, TOTAL_OPS);
  if (operator_ == nullptr) {
    MS_LOG(INFO) << "Create " << name << " failed";
    return nullptr;
  }
  std::string origin_name = operator_->name();
  operator_->set_name(origin_name + std::to_string(TOTAL_OPS));
  MS_LOG(INFO) << "Successfully created operator " << origin_name;
  ++TOTAL_OPS;
  return operator_;
}
OperatorInfoPtr OperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
                                 const std::vector<Shapes> &shape_list) {
  MS_EXCEPTION_IF_NULL(prim);
  OperatorInfoPtr operator_ = OperatorInstanceByName(prim->name(), attrs, shape_list);
  if (operator_ == nullptr) {
    MS_LOG(INFO) << "Create " << prim->name() << " failed, use batch parallel";
    operator_ = OperatorInstanceByName(BATCH_PARALLEL, attrs, shape_list);
    MS_EXCEPTION_IF_NULL(operator_);
  }
  return operator_;
}

OperatorInfoPtr NewOperatorInstance(const PrimitivePtr &prim, const PrimitiveAttrs &attrs,
                                    std::vector<Shapes> shape_list) {
  OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
  for (size_t i = 0; i < shape_list[0].size(); ++i) {
    MS_LOG(INFO) << "No: " << i << " input's shape: " << ShapeToString(shape_list[0][i]);
  }
  return operator_;
}
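// Parse the 'strategy' primitive attr, a tuple of per-input partition tuples
// (e.g. strategy=((2, 4), (4, 1)) for a two-input operator), into a StrategyPtr.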
StrategyPtr ExtractStrategy(std::unordered_map<std::string, ValuePtr> attrs) {
  ValueTuplePtr var = attrs[STRATEGY]->cast<ValueTuplePtr>();
  StrategyPtr strategyPtr;
  MS_LOG(INFO) << "Extract information: strategy " << attrs[STRATEGY]->ToString();
  if (var == nullptr) {
    MS_LOG(EXCEPTION) << "Strategy value is nullptr";
  }
  if (var->size() > 0) {
    std::vector<ValuePtr> elements = var->value();
    std::vector<Dimensions> strategy;
    for (uint32_t index = 0; index < elements.size(); ++index) {
      Dimensions dim;
      if (elements[index]->isa<ValueSequeue>()) {
        ValueTuplePtr value_tuple = elements[index]->cast<ValueTuplePtr>();
        std::vector<ValuePtr> value_vector = value_tuple->value();
        (void)std::transform(value_vector.begin(), value_vector.end(), std::back_inserter(dim),
                             [](const ValuePtr &value) { return static_cast<int32_t>(GetValue<int>(value)); });
        strategy.push_back(dim);
      } else {
        MS_LOG(EXCEPTION) << "Failure: the strategy's format is wrong! Need ValueSequeue";
      }
    }
    if (strategy.empty()) {
      MS_LOG(EXCEPTION) << "ExtractStrategy: failed to extract strategy";
    }
    strategyPtr = NewStrategy(0, strategy);
  }

  return strategyPtr;
}
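// Return the shapes of a node's output(s): a single-element Shapes for ordinary nodes,
// one entry per element for tuple outputs, with special handling for MakeRef nodes and
// for cnodes whose first input is itself a cnode.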
Shapes GetNodeShape(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  Shapes shapes;
  BaseShapePtr base_shape_ptr = node->Shape();
  if (node->isa<CNode>()) {
    auto cnode = node->cast<CNodePtr>();
    if (IsValueNode<Primitive>(cnode->input(0))) {
      PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
      MS_EXCEPTION_IF_NULL(prim);
      if (prim->name() == MAKEREF) {
        AnfNodePtr ref_node = cnode->input(1);
        auto func_graph = cnode->func_graph();
        MS_EXCEPTION_IF_NULL(ref_node);
        MS_EXCEPTION_IF_NULL(func_graph);
        return GetRefKeyNodeShape(ref_node, func_graph);
      }
    }
    if (cnode->input(0)->isa<CNode>()) {
      if (cnode->inputs().size() < 2) {
        MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " size is smaller than 2";
      }
      base_shape_ptr = cnode->input(1)->Shape();
    }
  }
  if (base_shape_ptr == nullptr) {
    MS_LOG(EXCEPTION) << "GetNodeShape: " << node->ToString() << " shape_ptr is nullptr, full name is "
                      << node->fullname_with_scope();
  }
  auto tuple_shape_ptr = dyn_cast<abstract::TupleShape>(base_shape_ptr);
  if (tuple_shape_ptr != nullptr) {
    auto tuple_shape = tuple_shape_ptr->shape();
    for (auto &shape : tuple_shape) {
      auto each_shape = dyn_cast<abstract::Shape>(shape);
      MS_EXCEPTION_IF_NULL(each_shape);
      shapes.push_back(each_shape->shape());
    }
  } else {
    auto shape_ptr = dyn_cast<abstract::Shape>(base_shape_ptr);
    MS_EXCEPTION_IF_NULL(shape_ptr);
    shapes.push_back(shape_ptr->shape());
  }
  return shapes;
}
std::vector<AnfNodePtr> FindParameterByRefKeyNode(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> parameters;
  if (!IsValueNode<RefKey>(node)) {
    MS_LOG(ERROR) << "The node is not a ref key";
    return parameters;
  }

  auto ref_key = GetValueNode<RefKeyPtr>(node);
  MS_EXCEPTION_IF_NULL(ref_key);
  auto name = ref_key->tag();

  auto manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto roots = manager->roots();
  if (roots.size() != 1) {
    MS_LOG(ERROR) << "The size of roots ( " << roots.size() << " ) is not 1";
    return parameters;
  }

  FuncGraphPtr root_g = roots.back();
  MS_EXCEPTION_IF_NULL(root_g);
  for (auto &param_node : root_g->parameters()) {
    auto param = param_node->cast<ParameterPtr>();
    if (param && (name == param->name())) {
      parameters.push_back(param_node);
      MS_LOG(INFO) << "The name of the ref key is: " << name;
      return parameters;
    }
  }

  MS_LOG(ERROR) << "The name of the ref key is: " << name << ", but the parameter has not been found";
  return parameters;
}
Shapes GetRefKeyNodeShape(const AnfNodePtr &node, const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(node, func_graph);
  if (parameters.size() != 1) {
    MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
  }
  Shapes input_shapes;
  input_shapes = GetNodeShape(parameters[0]);
  if (input_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "Get input shape failed";
  }
  MS_LOG(INFO) << "The parameter shape is " << ShapeToString(input_shapes[0]);
  return input_shapes;
}

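// Collects a cnode's shapes as {input_shapes, output_shapes}. RefKey inputs
// are resolved to their parameter's shape and recorded in g_RefMap so the
// parameter's slice shape can be set later.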
std::vector<Shapes> ExtractShape(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  Shapes shape_inputs, shape_outputs;
  std::vector<Shapes> shape_all;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  size_t inputs_size = all_inputs.size();
  for (size_t i = 1; i < inputs_size; ++i) {
    Shapes input_shapes;
    AnfNodePtr input = all_inputs[i];
    if (IsValueNode<RefKey>(input)) {
      auto func_graph = node->func_graph();
      MS_EXCEPTION_IF_NULL(func_graph);
      std::vector<AnfNodePtr> parameters = FindParameterByRefKeyNode(input, func_graph);
      if (parameters.size() != 1) {
        MS_LOG(EXCEPTION) << "Find parameter by ref key node failed";
      }
      std::pair<AnfNodePtr, int> node_pair = std::make_pair(node, SizeToInt(i));
      g_RefMap[parameters[0]] = node_pair;
      input_shapes = GetRefKeyNodeShape(input, func_graph);
    } else if (IsValueNode<Tensor>(input) || input->isa<CNode>() || input->isa<Parameter>()) {
      input_shapes = GetNodeShape(input);
    } else {
      continue;
    }
    if (input_shapes.size() != 1) {
      MS_LOG(EXCEPTION) << "ExtractShape: get input shape failed";
    }
    shape_inputs.push_back(input_shapes[0]);
  }
  shape_all.push_back(shape_inputs);
  // extract output shape
  shape_outputs = GetNodeShape(node);
  shape_all.push_back(shape_outputs);
  return shape_all;
}

std::pair<AnfNodePtr, int> FindParallelCareNode(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  FuncGraphPtr func_graph = node->func_graph();
  MS_EXCEPTION_IF_NULL(func_graph);
  FuncGraphManagerPtr manager = func_graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[node];
  for (auto &node_pair : node_set) {
    CNodePtr cnode = node_pair.first->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_node_anf = cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_node_anf);
    PrimitivePtr node_prim = prim_node_anf->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    if (node_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(cnode) && cnode->operator_info() != nullptr) {
      return node_pair;
    }
    auto next_pair = FindParallelCareNode(node_pair.first);
    if (next_pair.first != nullptr) {
      return next_pair;
    }
  }
  return std::make_pair(nullptr, 0);
}

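// Finds the (cnode, input index) pair of the first parallel-care user of a
// parameter, descending into called subgraphs when the parameter is only
// forwarded to them.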
std::pair<AnfNodePtr, int> FindSubGraph(const FuncGraphPtr &graph, const AnfNodePtr &parameter) {
  MS_EXCEPTION_IF_NULL(graph);
  MS_EXCEPTION_IF_NULL(parameter);
  FuncGraphManagerPtr manager = graph->manager();
  MS_EXCEPTION_IF_NULL(manager);
  std::pair<AnfNodePtr, int> prim_anf_node_pair = FindParallelCareNode(parameter);
  if (prim_anf_node_pair.first != nullptr) {
    return prim_anf_node_pair;
  } else {
    AnfNodeIndexSet param_sub_set = manager->node_users()[parameter];
    for (auto &param_pair : param_sub_set) {
      CNodePtr graph_cnode = param_pair.first->cast<CNodePtr>();
      if ((graph_cnode == nullptr) || !graph_cnode->input(0)->isa<CNode>()) {
        continue;
      }
      CNodePtr graph_cnode_inp0 = graph_cnode->input(0)->cast<CNodePtr>();
      if (!IsValueNode<FuncGraph>(graph_cnode_inp0->input(1))) {
        continue;
      }
      FuncGraphPtr graph_sub = GetValueNode<FuncGraphPtr>(graph_cnode_inp0->input(1));
      auto parameters = graph_sub->parameters();
      if (IntToSize(param_pair.second - 1) >= parameters.size()) {
        MS_LOG(EXCEPTION) << "The index is out of range, index is " << param_pair.second - 1 << ", vector size is "
                          << parameters.size();
      }
      std::pair<AnfNodePtr, int> res = FindSubGraph(graph_sub, parameters[IntToSize(param_pair.second - 1)]);
      if (res.first != nullptr) {
        return res;
      }
    }
  }
  return std::make_pair(nullptr, 0);
}

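// Shrinks a parameter's abstract shape to the slice shape required by the
// consuming operator, and attaches the corresponding tensor layout to the
// parameter.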
void SetParallelShape(const AnfNodePtr &parameter, const std::pair<AnfNodePtr, int> &res) {
  MS_EXCEPTION_IF_NULL(parameter);
  AbstractBasePtr abstract = parameter->abstract();
  MS_EXCEPTION_IF_NULL(abstract);
  MS_LOG(DEBUG) << "SetParallelShape " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
  CNodePtr cnode = res.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = cnode->operator_info();
  if (distribute_operator == nullptr) {
    MS_LOG(EXCEPTION) << "Failure: node " << cnode->ToString() << "'s OperatorInfoPtr is nullptr";
  }
  if (IntToSize(res.second - 1) >= distribute_operator->inputs_tensor_info().size()) {
    MS_LOG(EXCEPTION) << "The index is out of range, index is " << res.second - 1 << ", vector size is "
                      << distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[IntToSize(res.second - 1)];
  Shape slice_shape = tensorinfo_in.slice_shape();
  MS_LOG(DEBUG) << "SetParallelShape slice_shape " << parameter->ToString() << " shape "
                << MakeValue(slice_shape)->ToString();
  std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
  MS_EXCEPTION_IF_NULL(parallel_shape);
  // Don't modify it in place, as the pointer of this AbstractValue may be used as a cache key in StaticAnalysis.
  auto cloned_abstract = abstract->Clone();
  MS_EXCEPTION_IF_NULL(cloned_abstract);
  cloned_abstract->set_shape(parallel_shape);
  parameter->set_abstract(cloned_abstract);
  TensorLayout tensor_layout = tensorinfo_in.tensor_layout();
  ParameterPtr parameter_ptr = parameter->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(parameter_ptr);
  parameter_ptr->set_tensor_layout(std::make_shared<TensorLayout>(tensor_layout));
}

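// Sets the slice shape for every root-graph parameter that feeds a
// parallel-care node, using g_RefMap entries first and graph search otherwise.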
void CoverSliceShape(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto parameters = root->parameters();
  for (auto &parameter : parameters) {
    MS_EXCEPTION_IF_NULL(parameter->Shape());
    auto iter = g_RefMap.find(parameter);
    if (iter != g_RefMap.end()) {
      SetParallelShape(parameter, g_RefMap[parameter]);
      continue;
    }
    std::pair<AnfNodePtr, int> res = FindSubGraph(root, parameter);
    if (res.first == nullptr) {
      MS_LOG(INFO) << "Parameter " << parameter->ToString() << " does not need to set a parallel shape";
    } else {
      SetParallelShape(parameter, res);
      MS_LOG(DEBUG) << "Parameter " << parameter->ToString() << " shape " << parameter->Shape()->ToString();
    }
  }
  g_RefMap.clear();
}

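// A parameter is "cloned" when its Python clone_info marks it as a copy of
// another parameter (e.g. an optimizer's copy of a weight).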
bool ParameterIsCloned(const FuncGraphPtr &root, const AnfNodePtr &parameter_node) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(parameter_node);
  FuncGraphManagerPtr manager = root->manager();
  MS_EXCEPTION_IF_NULL(manager);
  auto cloned_parameter = parameter_node->cast<ParameterPtr>();
  MS_EXCEPTION_IF_NULL(cloned_parameter);
  // find the cloned parameter
  if (!cloned_parameter->has_default()) {
    return false;
  }
  auto param_value = std::dynamic_pointer_cast<ParamValuePy>(cloned_parameter->default_param());
  MS_EXCEPTION_IF_NULL(param_value);
  py::object clone_info = parse::python_adapter::GetPyObjAttr(param_value->value(), CLONE_INFO);
  bool cloned = py::cast<bool>(parse::python_adapter::GetPyObjAttr(clone_info, CLONED));
  if (!cloned) {
    return false;
  }
  MS_LOG(INFO) << "The parameter: " << cloned_parameter->name() << " is cloned";
  return true;
}

void SetClonedTensorShapeForOptimizer(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &cloned_parameter_node : root->parameters()) {
    MS_EXCEPTION_IF_NULL(cloned_parameter_node);
    auto cloned_parameter = cloned_parameter_node->cast<ParameterPtr>();
    MS_EXCEPTION_IF_NULL(cloned_parameter);
    if (!ParameterIsCloned(root, cloned_parameter_node)) {
      continue;
    }
    // get the clone index
    auto param_value = std::dynamic_pointer_cast<ParamValuePy>(cloned_parameter->default_param());
    MS_EXCEPTION_IF_NULL(param_value);
    py::object cloned_info = parse::python_adapter::GetPyObjAttr(param_value->value(), CLONE_INFO);
    int32_t cloned_index = py::cast<int32_t>(parse::python_adapter::GetPyObjAttr(cloned_info, CLONED_INDEX));
    // find the parameter it was cloned from
    bool found_be_cloned_parameter = false;
    ParameterPtr cloned_from_parameter = nullptr;
    AnfNodePtr cloned_from_node = nullptr;
    for (auto &be_cloned_parameter_node : root->parameters()) {
      MS_EXCEPTION_IF_NULL(be_cloned_parameter_node);
      auto be_cloned_parameter = be_cloned_parameter_node->cast<ParameterPtr>();
      MS_EXCEPTION_IF_NULL(be_cloned_parameter);
      if (!be_cloned_parameter->has_default()) {
        continue;
      }
      auto param_value_cloned = std::dynamic_pointer_cast<ParamValuePy>(be_cloned_parameter->default_param());
      MS_EXCEPTION_IF_NULL(param_value_cloned);
      py::object be_cloned_info = parse::python_adapter::GetPyObjAttr(param_value_cloned->value(), CLONE_INFO);
      if (!py::cast<bool>(parse::python_adapter::GetPyObjAttr(be_cloned_info, BE_CLONED))) {
        continue;
      }
      // get the indices under which the source parameter was cloned
      py::list be_cloned_index = parse::python_adapter::GetPyObjAttr(be_cloned_info, BE_CLONED_INDEX);
      for (auto &index : be_cloned_index) {
        if (cloned_index == py::cast<int32_t>(index)) {
          found_be_cloned_parameter = true;
          cloned_from_parameter = be_cloned_parameter;
          cloned_from_node = be_cloned_parameter_node;
          break;
        }
      }
    }
    if (found_be_cloned_parameter) {
      // set the shape and tensor layout for the cloned parameter
      cloned_parameter->set_tensor_layout(cloned_from_parameter->tensor_layout());
      MS_EXCEPTION_IF_NULL(cloned_parameter_node->abstract());
      MS_EXCEPTION_IF_NULL(cloned_from_node->abstract());
      auto cloned_abstract = cloned_parameter_node->abstract()->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(cloned_from_node->abstract()->GetShapeTrack());
      cloned_parameter_node->set_abstract(cloned_abstract);
      MS_LOG(INFO) << "The parameter: " << cloned_parameter->name()
                   << " is cloned, the source parameter is: " << cloned_from_parameter->name()
                   << ", clone index is: " << cloned_index;
    } else {
      MS_LOG(EXCEPTION) << "The parameter: " << cloned_parameter->name() << " is cloned, cloned index is "
                        << cloned_index << ", but the source parameter is not found";
    }
  }
  std::string env = common::GetEnv("SLICE_ENV");
  if (!env.empty()) {
    MS_LOG(INFO) << "Slice tensors shape will be configured from env: " << env;
    abstract::InitUndeterminedFromEnv(env);
  }
}

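// For a VirtualDataset cnode, generates a data-parallel strategy that splits
// the first dimension of every input across all devices of stage 0 (or keeps
// it whole in full_batch mode) and stores it in the STRATEGY attr.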
void SetVirtualDatasetStrategy(const CNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  bool full_batch = ParallelContext::GetInstance()->full_batch();
  PrimitivePtr prim = GetValueNode<PrimitivePtr>(node->input(0));
  MS_EXCEPTION_IF_NULL(prim);
  if (prim->name() == VIRTUAL_DATA_SET) {
    CheckGlobalDeviceManager();
    int32_t dev_num;
    if (full_batch) {
      dev_num = 1;
    } else {
      dev_num = SizeToInt(g_device_manager->GetDeviceListByStageId(0).size());
    }
    auto attrs_temp = prim->attrs();
    std::vector<Shapes> shape_list = ExtractShape(node);
    if (shape_list.empty()) {
      MS_LOG(EXCEPTION) << "Failure: node " << node->ToString() << " failed to extract shape";
    }
    std::vector<ValuePtr> elements;
    for (size_t i = 0; i < shape_list[0].size(); i++) {
      if (shape_list[0][i].empty()) {
        MS_LOG(EXCEPTION) << "shape_list[0][" << i << "] is empty";
      }
      std::vector<int32_t> input_strategy = {dev_num};
      for (size_t j = 1; j < shape_list[0][i].size(); j++) {
        input_strategy.push_back(1);
      }
      elements.push_back(MakeValue(input_strategy));
    }
    ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
    attrs_temp[STRATEGY] = strategy;
    (void)prim->SetAttrs(attrs_temp);
  }
}

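// For every parallel-care cnode: extract shapes, instantiate the OperatorInfo,
// and resolve a strategy from the user-set attr, the strategy checkpoint, or a
// generated batch-parallel strategy. A strategy attr is a tuple of per-input
// split tuples, e.g. ((8, 1), (1, 1)) splits the first input's rows across 8
// devices.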
void ExtractInformation(const std::vector<AnfNodePtr> &all_nodes) {
  // load strategy map from checkpoint
  StrategyMap stra_map;
  if (StrategyCheckpoint::GetInstance().LoadCheckPointOn()) {
    if (StrategyCheckpoint::GetInstance().Load(&stra_map) != SUCCESS) {
      MS_LOG(EXCEPTION) << "Load strategy checkpoint failed";
    }
  }
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    SetVirtualDatasetStrategy(cnode);
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    auto attrs = prim->attrs();
    MS_LOG(INFO) << "extract information: node: " << node->ToString() << " prim " << prim->name();
    if (IsParallelCareNode(cnode)) {
      std::vector<Shapes> shape_list = ExtractShape(cnode);
      if (shape_list.empty()) {
        MS_LOG(EXCEPTION) << "Failure: node " << node->ToString() << " failed to extract shape";
      }
      OperatorInfoPtr operator_ = OperatorInstance(prim, attrs, shape_list);
      if (operator_ == nullptr) {
        MS_LOG(EXCEPTION) << "Failure: Primitive " << prim->name() << " OperatorInstance failed";
      }
      auto &inputs = cnode->inputs();
      std::vector<ValuePtr> input_value;
      for (size_t index = 1; index < inputs.size(); ++index) {
        if (inputs[index]->isa<ValueNode>()) {
          input_value.push_back(GetValueNode(inputs[index]));
        } else {
          input_value.emplace_back(nullptr);
        }
      }
      StrategyPtr strategyPtr = nullptr;
      (*operator_).set_input_value(input_value);
      (*operator_).set_outputs_dtype(cnode->Type());
      (*operator_).set_cnode(cnode);
      if (prim->name() == RESHAPE) {
        (void)cnode->set_operator_info(operator_);
        continue;
      }
      // load strategy checkpoint
      // key of strategy map
      std::string strategy_key_name = NodeParameterName(cnode);
      bool load_strategy_from_ckpt =
        StrategyCheckpoint::GetInstance().LoadCheckPointOn() && stra_map.find(strategy_key_name) != stra_map.end();
      if (!StrategyFound(attrs) && !load_strategy_from_ckpt) {
        MS_LOG(INFO) << "ExtractInformation: the strategy of node " << node->ToString() << " prim " << prim->name()
                     << " is empty, using batch parallel";
        std::shared_ptr<std::vector<Dimensions>> strategy_v_ptr = operator_->GenerateBatchStrategies();
        if (strategy_v_ptr == nullptr) {
          MS_LOG(EXCEPTION) << "Failure: generate batch parallel strategy failed";
        }
        std::vector<ValuePtr> elements;
        for (size_t i = 0; i < strategy_v_ptr->size(); i++) {
          elements.push_back(MakeValue((*strategy_v_ptr)[i]));
        }
        ValueTuplePtr strategy = std::make_shared<ValueTuple>(elements);
        // display the strategy generated by batch parallel
        attrs[GEN_STRATEGY] = strategy;
        (void)prim->SetAttrs(attrs);
        MS_LOG(INFO) << "node " << node->ToString() << " prim " << prim->name() << " batch parallel strategy is "
                     << attrs[GEN_STRATEGY]->ToString();
        strategyPtr = NewStrategy(0, *strategy_v_ptr);
      } else if (load_strategy_from_ckpt) {
        strategyPtr = stra_map[strategy_key_name];
      } else {
        strategyPtr = ExtractStrategy(attrs);
      }
      if (strategyPtr != nullptr) {
        if (operator_->Init(strategyPtr) == FAILED) {
          MS_LOG(EXCEPTION) << "Failure: operator " << prim->name() << " init failed";
        }
        (void)cnode->set_operator_info(operator_);
      } else {
        MS_LOG(EXCEPTION) << "ERROR: strategy_ptr is nullptr";
      }
    }
  }
}

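// Returns the layout the consumer cnode expects for its (node_pair.second)-th
// input, taken from the consumer's OperatorInfo.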
TensorLayout GetInputLayoutFromCNode(const std::pair<AnfNodePtr, int> &node_pair) {
  CNodePtr cnode = node_pair.first->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  MS_EXCEPTION_IF_NULL(distribute_operator);
  int index = node_pair.second;
  if (index > SizeToInt(distribute_operator->inputs_tensor_info().size())) {
    MS_LOG(EXCEPTION) << "The index is out of range, node_pair.second is " << index << ", the vector size is "
                      << distribute_operator->inputs_tensor_info().size();
  }
  TensorInfo tensorinfo_in = distribute_operator->inputs_tensor_info()[IntToSize(index - 1)];
  TensorLayout tensorlayout_in = tensorinfo_in.tensor_layout();
  return tensorlayout_in;
}

// If reshape's output connects to several primitives, return the first layout found.
std::shared_ptr<TensorLayout> FindNextLayout(const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(cnode);
  MS_EXCEPTION_IF_NULL(cnode->func_graph());
  FuncGraphManagerPtr manager = cnode->func_graph()->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodeIndexSet node_set = manager->node_users()[cnode];
  for (auto &node_pair : node_set) {
    CNodePtr use_apply = node_pair.first->cast<CNodePtr>();
    if (use_apply == nullptr || !IsValueNode<Primitive>(use_apply->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = use_apply->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(prim_anf_node);
    PrimitivePtr node_prim = prim_anf_node->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(node_prim);
    MS_LOG(INFO) << "FindNextLayout prim " << node_prim->name();
    if (node_prim->name() == DEPEND && node_pair.second != 1) {
      continue;
    }
    if (IsParallelCareNode(use_apply) && (use_apply->operator_info() != nullptr)) {
      MS_LOG(INFO) << "FindNextLayout success prim " << node_prim->name();
      auto layout = GetInputLayoutFromCNode(node_pair);
      return std::make_shared<TensorLayout>(layout);
    }
    MS_LOG(DEBUG) << "FindNextLayout failed prim " << node_prim->name() << " " << IsParallelCareNode(use_apply)
                  << " " << (use_apply->operator_info() != nullptr);
    auto layout_ptr = FindNextLayout(use_apply);
    if (layout_ptr) {
      return layout_ptr;
    }
  }
  MS_LOG(WARNING) << "FindNextLayout returns nullptr; if reshape is not the last primitive, there must be some error";
  return nullptr;
}

std::shared_ptr<TensorLayout> GetOutputLayoutFromCNode(const CNodePtr &cnode, size_t output_index) {
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
  MS_EXCEPTION_IF_NULL(distribute_operator);
  if (distribute_operator->outputs_tensor_info().size() <= output_index) {
    MS_LOG(EXCEPTION) << "outputs_tensor_info size is " << distribute_operator->outputs_tensor_info().size()
                      << ", output_index " << output_index << " is out of range";
  }
  TensorInfo tensorinfo_out = distribute_operator->outputs_tensor_info()[output_index];
  TensorLayout tensorlayout_out = tensorinfo_out.tensor_layout();
  return std::make_shared<TensorLayout>(tensorlayout_out);
}

std::shared_ptr<TensorLayout> FindPrevParallelCareNodeLayout(const AnfNodePtr &node, size_t output_index) {
  if (!node->isa<CNode>()) {
    return nullptr;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return nullptr;
  }
  if (IsParallelCareNode(cnode) && (cnode->operator_info() != nullptr)) {
    auto layout_ptr = GetOutputLayoutFromCNode(cnode, output_index);
    if (!layout_ptr) {
      MS_LOG(EXCEPTION) << "Failure: GetOutputLayoutFromCNode failed";
    }
    return layout_ptr;
  }
  return nullptr;
}

std::shared_ptr<TensorLayout> CreateParameterLayout(const AnfNodePtr &node) {
  // Create a data-parallel tensor layout for the parameter (supports WideDeep).
  CheckGlobalDeviceManager();
  int32_t dev_num = SizeToInt(g_device_manager->GetDeviceListByStageId(0).size());
  TensorLayout input_tensor_layout;
  // create input_shape
  Shapes inputs_shape = GetNodeShape(node);
  Shape input_shape_array = inputs_shape[0];
  if (input_shape_array.empty()) {
    MS_LOG(EXCEPTION) << "Reshaping a scalar parameter is not supported.";
  }
  // create tensor_map
  size_t shape_size = input_shape_array.size();
  TensorMap input_tensor_map_array(SizeToInt(shape_size) - 1, -1);
  input_tensor_map_array.insert(input_tensor_map_array.begin(), 0);
  // create dev_matrix
  Shape dev_matrix_array = {dev_num};
  if (input_tensor_layout.InitFromVector(dev_matrix_array, input_tensor_map_array, input_shape_array) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Create tensor layout for parameter failed.";
  }
  return std::make_shared<TensorLayout>(input_tensor_layout);
}

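// Walks backwards from a reshape input to the layout produced by the nearest
// parallel-care node; a parameter input gets a data-parallel layout.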
std::shared_ptr<TensorLayout> FindPrevLayout(const AnfNodePtr &node) {
  if (node->isa<Parameter>()) {
    return CreateParameterLayout(node);
  }
  if (!node->isa<CNode>()) {
    return nullptr;
  }
  CNodePtr cnode = node->cast<CNodePtr>();
  if (!IsValueNode<Primitive>(cnode->input(0))) {
    return nullptr;
  }
  if (IsParallelCareNode(cnode) && (cnode->operator_info() != nullptr)) {
    auto layout_ptr = GetOutputLayoutFromCNode(cnode, 0);
    if (!layout_ptr) {
      MS_LOG(EXCEPTION) << "Failure: GetOutputLayoutFromCNode failed";
    }
    return layout_ptr;
  }
  ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  if (prim->name() == TUPLE_GETITEM) {
    auto tuple_index = GetTupleGetItemIndex(cnode);
    auto layout_ptr = FindPrevParallelCareNodeLayout(cnode->input(1), IntToSize(tuple_index));
    if (!layout_ptr) {
      MS_LOG(EXCEPTION)
        << "Failure: FindPrevLayout failed, tuple_getitem comes before reshape, but there does not exist a "
           "parallel-care node before tuple_getitem!";
    }
    return layout_ptr;
  }
  for (size_t index = 0; index < cnode->inputs().size(); ++index) {
    if (prim->name() == DEPEND && index != 1) {
      continue;
    }
    auto layout_ptr = FindPrevLayout(cnode->inputs()[index]);
    if (!layout_ptr) {
      continue;
    }
    return layout_ptr;
  }
  MS_LOG(WARNING) << "FindPrevLayout returns nullptr; if reshape is not the first primitive, there must be some error";
  return nullptr;
}

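// Initializes every Reshape operator from the layouts around it: the layout
// feeding its input and the layout its consumers expect.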
void ReshapeInit(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    ValueNodePtr prim_anf_node = cnode->input(0)->cast<ValueNodePtr>();
    if (!IsParallelCareNode(cnode) || (cnode->operator_info() == nullptr)) {
      continue;
    }
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(prim_anf_node);
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->operator_info();
    if (operator_info == nullptr) {
      MS_LOG(EXCEPTION) << "Failure: Primitive " << prim->ToString() << " OperatorInstance is nullptr";
    }
    if (prim->name() != RESHAPE) {
      continue;
    }
    auto attrs = prim->attrs();
    if (StrategyFound(attrs)) {
      MS_LOG(EXCEPTION) << "Setting a strategy for Reshape has no effect!";
    }
    MS_ASSERT(cnode->inputs().size() == 3);
    auto prev_layout_ptr = FindPrevLayout(cnode->input(1));
    if (prev_layout_ptr) {
      auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
      MS_EXCEPTION_IF_NULL(reshape_info_ptr);
      reshape_info_ptr->SetInputLayout(*prev_layout_ptr);
    }
    auto next_layout_ptr = FindNextLayout(cnode);
    if (next_layout_ptr) {
      auto reshape_info_ptr = std::dynamic_pointer_cast<ReshapeInfo>(operator_info);
      MS_EXCEPTION_IF_NULL(reshape_info_ptr);
      reshape_info_ptr->SetOutputLayout(*next_layout_ptr);
    }
    if (operator_info->Init(nullptr) == FAILED) {
      MS_LOG(EXCEPTION) << "Failure: operator " << prim->ToString() << " init failed";
    }
  }
}

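// Locates the loss cnode of a forward graph by walking back from the return
// node, looking through Cast and tuple_getitem wrappers.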
CNodePtr FindLossCNode(const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(func_graph);
  CNodePtr return_node = func_graph->get_return();
  MS_EXCEPTION_IF_NULL(return_node);
  if (return_node->size() < 2) {
    MS_LOG(EXCEPTION) << "Failure: " << return_node->ToString() << " size is smaller than 2";
  }
  AnfNodePtr pre_node = return_node->input(1);
  MS_EXCEPTION_IF_NULL(pre_node);
  auto pre_cnode = pre_node->cast<CNodePtr>();
  if (pre_cnode == nullptr) {
    return nullptr;
  }
  auto current_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
  MS_EXCEPTION_IF_NULL(current_prim);
  // return -> cast
  if (current_prim->name() == CAST && pre_cnode->operator_info() == nullptr) {
    pre_cnode = pre_cnode->input(1)->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(pre_cnode);
    current_prim = GetValueNode<PrimitivePtr>(pre_cnode->input(0));
    MS_EXCEPTION_IF_NULL(current_prim);
  }
  // note: the GetNext op has no input
  if (INVALID_LOSS_OPS.find(current_prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(INFO) << "The loss is: " << current_prim->name();
    return pre_cnode;
  }
  // a common cnode has more than one input
  if (pre_cnode->size() < 2) {
    MS_LOG(EXCEPTION) << pre_cnode->ToString() << " size( " << pre_cnode->inputs().size() << " ) is smaller than 2";
  }
  // return -> tuple_getitem -> loss
  if (current_prim->name() == TUPLE_GETITEM) {
    AnfNodePtr pre_pre_node = pre_cnode->input(1);
    MS_EXCEPTION_IF_NULL(pre_pre_node);
    auto pre_pre_cnode = pre_pre_node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(pre_pre_cnode);
    auto value = pre_pre_cnode->input(0)->cast<ValueNodePtr>();
    MS_EXCEPTION_IF_NULL(value);
    PrimitivePtr prim = value->value()->cast<PrimitivePtr>();
    MS_EXCEPTION_IF_NULL(prim);
    MS_LOG(DEBUG) << "The loss name is " << prim->name();
    return pre_pre_cnode;
  }
  // return -> make_tuple
  if (current_prim->name() == MAKE_TUPLE) {
    MS_LOG(EXCEPTION) << "The loss contains make_tuple, which is not supported";
  }
  // return -> loss
  MS_LOG(DEBUG) << "The loss name is " << current_prim->name();
  return pre_cnode;
}

TensorLayouts GetLossNodeGradOutputLayout(const CNodePtr &loss_cnode) {
  TensorLayouts ret;
  MS_EXCEPTION_IF_NULL(loss_cnode);
  AnfNodePtr node = loss_cnode->cast<AnfNodePtr>();
  MS_EXCEPTION_IF_NULL(node);
  LossNodeInfo node_info = GetLossNodeInfo(node);
  ValueNodePtr prim_anf_node = loss_cnode->input(0)->cast<ValueNodePtr>();
  MS_EXCEPTION_IF_NULL(prim_anf_node);
  PrimitivePtr prim = prim_anf_node->value()->cast<PrimitivePtr>();
  MS_EXCEPTION_IF_NULL(prim);
  if (INVALID_LOSS_OPS.find(prim->name()) != INVALID_LOSS_OPS.end()) {
    MS_LOG(WARNING) << "The loss name is: " << prim->name() << ", do nothing for split sens now";
    return ret;
  }
  OperatorInfoPtr operator_info = loss_cnode->operator_info();
  MS_EXCEPTION_IF_NULL(operator_info);
  TensorInfo loss_grad_tensor_info;
  size_t op_output_size = operator_info->outputs_tensor_info().size();
  MS_LOG(INFO) << "The loss name is " << operator_info->name() << ", has_tuple_getitem is "
               << node_info.has_tuple_getitem << ", the output size is " << op_output_size << ", the dout_index is "
               << node_info.dout_index;
  if ((op_output_size == 0) || (op_output_size <= IntToSize(node_info.dout_index))) {
    MS_LOG(EXCEPTION) << "The index is " << node_info.dout_index << ", but the size of outputs is " << op_output_size;
  }
  if (!node_info.has_tuple_getitem && (op_output_size > 1)) {
    MS_LOG(EXCEPTION) << "Currently, it is not supported that the sens is a tuple.";
  }
  loss_grad_tensor_info = operator_info->outputs_tensor_info()[IntToSize(node_info.dout_index)];
  ret.push_back(loss_grad_tensor_info.tensor_layout());
  return ret;
}

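// Aligns the gradient sens input with the sliced loss output: a scalar sens
// is left whole, a parameter sens gets the loss layout, and a tensor sens is
// split in the graph with a _GetTensorSlice operator.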
void SplitSens(const CNodePtr &grad_sens_node, const TensorLayout &loss_grad_layout) {
  MS_EXCEPTION_IF_NULL(grad_sens_node);
  if (grad_sens_node->size() <= 1) {
    MS_LOG(EXCEPTION) << "The size of grad sens node is smaller than 2";
  }
  AnfNodePtr sens_tensor_node = grad_sens_node->input(1);
  MS_EXCEPTION_IF_NULL(sens_tensor_node);
  Shapes sens_shapes = GetNodeShape(sens_tensor_node);
  if (sens_shapes.size() != 1) {
    MS_LOG(EXCEPTION) << "GetNodeShape for sens_tensor_node, output size is not 1";
  }
  // If the shape of the sens tensor is [] or [1], there is no need to split it.
  Shape sens_shape = sens_shapes[0];
  if (sens_shape.empty() || ((sens_shape.size() == 1) && (sens_shape[0] == 1))) {
    if (sens_tensor_node->isa<Parameter>()) {
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      sens_tensor_param->set_tensor_layout(std::make_shared<TensorLayout>(loss_grad_layout));
    }
    MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", no need to split sens";
    return;
  }
  auto loss_shape = loss_grad_layout.tensor_shape().array();
  if (loss_shape != sens_shape) {
    MS_LOG(EXCEPTION) << "The shape of sens is not equal to the loss output, which is unsupported now. Sens shape is "
                      << ShapeToString(sens_shape) << ", loss shape is " << ShapeToString(loss_shape);
  }
  MS_LOG(INFO) << "The shape of sens is " << ShapeToString(sens_shape) << ", split it.";
  if (!IsValueNode<Tensor>(sens_tensor_node)) {
    if (sens_tensor_node->isa<Parameter>()) {
      MS_LOG(DEBUG) << "loss layout " << loss_grad_layout.ToString();
      AbstractBasePtr abstract = sens_tensor_node->abstract();
      MS_EXCEPTION_IF_NULL(abstract);
      auto slice_shape = loss_grad_layout.slice_shape().array();
      std::shared_ptr<abstract::BaseShape> parallel_shape = std::make_shared<abstract::Shape>(slice_shape);
      MS_EXCEPTION_IF_NULL(parallel_shape);
      auto cloned_abstract = abstract->Clone();
      MS_EXCEPTION_IF_NULL(cloned_abstract);
      cloned_abstract->set_shape(parallel_shape);
      sens_tensor_node->set_abstract(cloned_abstract);
      auto sens_tensor_param = sens_tensor_node->cast<ParameterPtr>();
      sens_tensor_param->set_tensor_layout(std::make_shared<TensorLayout>(loss_grad_layout));
      return;
    }
    MS_LOG(EXCEPTION) << "The type of the sens node is not Tensor or Parameter, which is unsupported now.";
  }
  // Use the _GetTensorSlice operator to split the sens tensor
  FuncGraphPtr func_graph = grad_sens_node->func_graph();  // only a cnode can get the graph
  MS_EXCEPTION_IF_NULL(func_graph);
  Operator op = CreateGetTensorSliceOp(loss_grad_layout);
  InsertGetTensorSliceOp(op, grad_sens_node, func_graph, 1, SPLIT_SENS);
}

void InsertForwardOps(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  OperatorVector forward_op = distribute_operator->forward_op();
  if (!forward_op.empty()) {
    MS_LOG(INFO) << "Insert forward op for " << distribute_operator->name();
    ForwardCommunication(forward_op, cnode);
  }
}

void StepReplace(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  // StepReplaceOp
  OperatorVector replace_op = distribute_operator->replace_op();
  if (!replace_op.empty()) {
    MS_LOG(INFO) << "StepReplaceOp " << cnode->ToString();
    StepReplaceOp(replace_op, cnode);
  }
  // StepReplaceGraph: after calling StepReplaceGraph, the cnode can not be used anymore.
  ReplaceGraphPtr replace_graph = distribute_operator->replace_graph(cnode);
  if (!replace_op.empty() && replace_graph) {
    MS_LOG(EXCEPTION) << "Only one of replace_op or replace_graph can be used";
  }
  if (replace_graph) {
    MS_LOG(INFO) << "StepReplaceGraph " << cnode->ToString();
    StepReplaceGraph(replace_graph, cnode);
  }
}

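// For a DropoutDoMask operator, replaces the linked DropoutGenMask cnode with
// the op returned by GetDropoutGenMaskReplaceOp, so the generated mask matches
// the sliced input.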
void HandleDropoutNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  MS_EXCEPTION_IF_NULL(distribute_operator);
  MS_EXCEPTION_IF_NULL(cnode);
  std::string op_name = distribute_operator->name();
  if (op_name.find(DROPOUT_DO_MASK) == std::string::npos) {
    return;
  }
  DropoutDoMaskInfoPtr dropout_do_mask = std::dynamic_pointer_cast<DropoutDoMaskInfo>(distribute_operator);
  MS_EXCEPTION_IF_NULL(dropout_do_mask);
  std::vector<Operator> replace_op = dropout_do_mask->GetDropoutGenMaskReplaceOp(cnode);
  if (replace_op.empty()) {
    MS_LOG(DEBUG) << "No need to replace dropout_gen_mask";
    return;
  }
  if (cnode->inputs().size() != DROPOUT_DO_MASK_CNODE_INPUT_SIZE) {
    MS_LOG(EXCEPTION) << "The size of the DropoutDoMask cnode's inputs is not " << DROPOUT_DO_MASK_CNODE_INPUT_SIZE;
  }
  ReplaceOneOp(replace_op[0], cnode->input(DROPOUT_GEN_MASK_INDEX)->cast<CNodePtr>());
}

void HandleSpecialNode(const OperatorInfoPtr &distribute_operator, const CNodePtr &cnode) {
  HandleDropoutNode(distribute_operator, cnode);
}

std::set<FuncGraphPtr> FindForwardGraphByRootNodes(const AnfNodeSet &root_all_nodes) {
  // J->CNode->Graph
  std::set<FuncGraphPtr> graph_set;
  for (auto &node : root_all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if ((cnode->size() < 2) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    auto expect_j_prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    if (expect_j_prim->name() != J) {
      continue;
    }
    if (IsValueNode<FuncGraph>(cnode->input(1))) {
      auto graph = GetValueNode<FuncGraphPtr>(cnode->input(1));
      MS_LOG(DEBUG) << "Find the forward graph success";
      graph_set.insert(graph);
    }
  }
  return graph_set;
}

void StepSplitSens(const std::pair<CNodePtr, CNodePtr> &sens_loss_pair) {
  CNodePtr sens_node = sens_loss_pair.first;
  CNodePtr loss_node = sens_loss_pair.second;
  auto loss_grad_layout = GetLossNodeGradOutputLayout(loss_node);
  if (!loss_grad_layout.empty()) {
    SplitSens(sens_node, loss_grad_layout[0]);
  }
}

// A sens node satisfies the following pattern: cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
std::vector<std::pair<CNodePtr, CNodePtr>> GetSensLossPairs(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  std::vector<std::pair<CNodePtr, CNodePtr>> sens_loss_pairs;
  for (auto &node : root->nodes()) {
    if (!node->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)
    auto sens_cnode = node->cast<CNodePtr>();
    AnfNodePtr expect_tuple_getitem = sens_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_tuple_getitem);
    if (!expect_tuple_getitem->isa<CNode>()) {
      continue;
    }
    auto expect_tuple_getitem_cnode = expect_tuple_getitem->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_tuple_getitem_cnode, TUPLE_GETITEM)) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode
    AnfNodePtr expect_anonymous = expect_tuple_getitem_cnode->input(1);
    MS_EXCEPTION_IF_NULL(expect_anonymous);
    if (!expect_anonymous->isa<CNode>()) {
      continue;
    }
    // cnode(sens)-->cnode(tuple_getitem)-->cnode-->cnode(J)
    auto expect_anonymous_cnode = expect_anonymous->cast<CNodePtr>();
    AnfNodePtr expect_j = expect_anonymous_cnode->input(0);
    MS_EXCEPTION_IF_NULL(expect_j);
    if (!expect_j->isa<CNode>()) {
      continue;
    }
    auto expect_j_cnode = expect_j->cast<CNodePtr>();
    if (!IsSomePrimitive(expect_j_cnode, J)) {
      continue;
    }
    if (!IsValueNode<FuncGraph>(expect_j_cnode->input(1))) {
      MS_LOG(EXCEPTION) << "Sens can't find the corresponding graph.";
    }
    auto func_graph = GetValueNode<FuncGraphPtr>(expect_j_cnode->input(1));
    auto loss_cnode = FindLossCNode(func_graph);
    if (loss_cnode == nullptr) {
      MS_LOG(WARNING) << "Can not find the loss cnode";
      continue;
    }
    std::pair<CNodePtr, CNodePtr> sens_loss_pair = std::make_pair(sens_cnode, loss_cnode);
    sens_loss_pairs.push_back(sens_loss_pair);
  }
  return sens_loss_pairs;
}

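// The insertion pass: splits sens for every loss, then inserts forward ops,
// redistribution ops, and backward communication ops around each
// parallel-care cnode, and finally applies the operator replacements.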
void ParallelCommunication(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes,
                           const FuncGraphManagerPtr &manager) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(manager);
  TensorRedistribution tensor_redistribution;
  std::vector<std::pair<CNodePtr, CNodePtr>> sens_loss_pairs = GetSensLossPairs(root);
  bool has_backward = !sens_loss_pairs.empty();
  // sens must be split before inserting the operators
  for (auto &pair : sens_loss_pairs) {
    // If the shape of the grad-sens tensor is not [] or [1], use get tensor slice to handle it.
    // If the type of the sens node is not Tensor, it is unsupported now; do nothing by default.
    StepSplitSens(pair);
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      if (!IsValueNode<Primitive>(cnode->input(0))) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      if (distribute_operator == nullptr) {
        continue;
      }
      // insert forward ops
      InsertForwardOps(distribute_operator, cnode);
      // insert redistribution ops
      StepRedistribution(cnode, distribute_operator, cnode, tensor_redistribution, cnode);
      // insert backward ops
      if (has_backward) {
        BackwardCommunication(distribute_operator, cnode, sens_loss_pairs);
      }
      HandleSpecialNode(distribute_operator, cnode);
    } else if (IsValueNode<Tensor>(node)) {
      StepSplitTensor(node, manager);
    }
  }
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (node->isa<CNode>()) {
      auto cnode = node->cast<CNodePtr>();
      if (!IsValueNode<Primitive>(cnode->input(0))) {
        continue;
      }
      OperatorInfoPtr distribute_operator = GetDistributeOperator(cnode);
      if (distribute_operator == nullptr) {
        continue;
      }
      // StepReplace
      StepReplace(distribute_operator, cnode);
    }
  }
}

namespace {
void RevertSymbolicKeyInstance(const FuncGraphPtr &root, const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(node);
  auto symbolic_key = GetValueNode<SymbolicKeyInstancePtr>(node);
  MS_EXCEPTION_IF_NULL(symbolic_key);
  auto all_upstream_node = root->manager()->node_users()[node];
  for (auto &upstream_node : all_upstream_node) {
    FuncGraphPtr fg = upstream_node.first->func_graph();
    if (symbolic_key->node()->isa<Parameter>()) {
      for (auto &param : root->parameters()) {
        if (*param == *symbolic_key->node()) {
          AnfNodePtr reverted_node = root->NewCNode({NewValueNode(prim::kPrimEmbed), param});
          MS_EXCEPTION_IF_NULL(reverted_node);
          MS_LOG(DEBUG) << "before replace " << node->ToString() << " to node " << reverted_node->DebugString();
          (void)fg->manager()->Replace(node, reverted_node);
          MS_LOG(DEBUG) << "revert node " << node->ToString() << " to node " << reverted_node->DebugString();
        }
      }
    }
  }
}
}  // namespace

void HandleSymbolicKeyInstance(const FuncGraphPtr &root, const std::vector<AnfNodePtr> &all_nodes) {
  MS_EXCEPTION_IF_NULL(root);
  for (auto &node : all_nodes) {
    // revert SymbolicKeyInstance back to the embed() primitive
    if (IsValueNode<SymbolicKeyInstance>(node)) {
      RevertSymbolicKeyInstance(root, node);
    }
  }
}

std::string NodeParameterName(const CNodePtr &node) {
  std::vector<AnfNodePtr> node_inputs{node->inputs()};
  for (auto input : node_inputs) {
    if (input->isa<Parameter>()) {
      auto input_parameter = input->cast<ParameterPtr>();
      if (input_parameter->has_default()) {
        auto param_value = std::dynamic_pointer_cast<ParamValuePy>(input_parameter->default_param());
        if (py::cast<bool>(parse::python_adapter::GetPyObjAttr(param_value->value(), REQUIRES_GRAD))) {
          return py::cast<std::string>(parse::python_adapter::GetPyObjAttr(param_value->value(), PARAM_NAME));
        }
      }
    }
  }
  return "";
}

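// Saves the strategy of every parallel-care cnode that consumes a trainable
// parameter into the strategy checkpoint, keyed by the parameter's name.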
void CheckpointStrategy(const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(func_graph);
  MS_LOG(DEBUG) << "Save strategy to checkpoint begin";
  StrategyMap stra_map;
  auto ret = func_graph->get_return();
  auto all_nodes = DeepScopedGraphSearch(ret);
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    auto cnode = node->cast<CNodePtr>();
    if ((cnode == nullptr) || !IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    std::string param_name = NodeParameterName(cnode);
    if (param_name.empty()) {
      continue;
    }
    PrimitivePtr prim = GetValueNode<PrimitivePtr>(cnode->input(0));
    MS_EXCEPTION_IF_NULL(prim);
    OperatorInfoPtr operator_info = cnode->operator_info();
    if (operator_info) {
      StrategyPtr strategyPtr = operator_info->strategy();
      MS_EXCEPTION_IF_NULL(node->scope());
      stra_map[param_name] = strategyPtr;
    }
  }
  if (StrategyCheckpoint::GetInstance().Save(stra_map) != SUCCESS) {
    MS_LOG(EXCEPTION) << "Save strategy checkpoint failed";
  }
}

void SetForwardFlag(const std::vector<AnfNodePtr> &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    MS_LOG(DEBUG) << "Set forward flag " << cnode->DebugString() << ".";
    cnode->set_in_forward_flag(true);
  }
}

void SetForwardFlag(const AnfNodeSet &all_nodes) {
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    if (!IsValueNode<Primitive>(cnode->input(0))) {
      continue;
    }
    // CNode is globally unique.
    cnode->set_in_forward_flag(true);
  }
}

std::set<FuncGraphPtr> ForwardGraph(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  const auto &all_nodes = root->nodes();
  std::set<FuncGraphPtr> graph_set = FindForwardGraphByRootNodes(all_nodes);
  return graph_set;
}

std::vector<AnfNodePtr> FindRootForwardCNode(const FuncGraphPtr &graph, const AnfNodeSet &all_nodes) {
  MS_EXCEPTION_IF_NULL(graph);
  std::vector<AnfNodePtr> root_forward_nodes;
  auto loss_cnode = FindLossCNode(graph);
  if (loss_cnode == nullptr) {
    MS_LOG(WARNING) << "Can not find the loss cnode";
    return root_forward_nodes;
  }
  auto loss_cnode_id = loss_cnode->UniqueIdThroughCopy();
  for (auto &node : all_nodes) {
    MS_EXCEPTION_IF_NULL(node);
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    auto root_node_id = node->UniqueIdThroughCopy();
    if (loss_cnode_id == root_node_id) {
      root_forward_nodes = DeepLinkedGraphSearch(cnode);
      break;
    }
  }
  return root_forward_nodes;
}

void MarkForwardCNode(const FuncGraphPtr &root) {
  MS_EXCEPTION_IF_NULL(root);
  auto all_nodes = root->nodes();
  std::set<FuncGraphPtr> graph_set = FindForwardGraphByRootNodes(all_nodes);
  if (graph_set.empty()) {
    MS_LOG(INFO) << "Can not find the forward graph, so mark the ops in the root graph";
    SetForwardFlag(all_nodes);
  } else {
    for (auto &func_graph : graph_set) {
      MS_LOG(INFO) << "The sub graph size of root is " << root->func_graphs_used().size();
      auto return_node = func_graph->get_return();
      MS_EXCEPTION_IF_NULL(return_node);
      auto all_dfs_nodes = DeepLinkedGraphSearch(return_node);
      SetForwardFlag(all_dfs_nodes);
      auto root_forward_nodes = FindRootForwardCNode(func_graph, all_nodes);
      if (root_forward_nodes.empty()) {
        continue;
      }
      // Mark the forward flag for the nodes in the root graph.
      SetForwardFlag(root_forward_nodes);
    }
  }
}

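// Initializes the device manager from the parallel context; when device_num or
// global_rank is not set explicitly, they are queried from the communication
// framework (HCCL or NCCL).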
Status ParallelInit() {
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  int32_t device_num = ParallelContext::GetInstance()->device_num();
  int32_t global_rank = ParallelContext::GetInstance()->global_rank();
  std::string backend = ParallelContext::GetInstance()->communication_backend();
  std::string world_group;
  if (backend == HCCL_BACKEND) {
    world_group = HCCL_WORLD_GROUP;
  } else if (backend == NCCL_BACKEND) {
    world_group = NCCL_WORLD_GROUP;
  } else {
    MS_LOG(EXCEPTION) << "Invalid communication backend: " << backend;
  }
  uint32_t world_rank_size = 0;
  if (!ParallelContext::GetInstance()->device_num_is_set()) {
    if (!CommManager::GetInstance().GetRankSize(world_group, &world_rank_size)) {
      MS_LOG(EXCEPTION) << "Get rank size failed";
    }
    device_num = UintToInt(world_rank_size);
    MS_LOG(INFO) << "Get device num from the communication framework, the device num is " << device_num;
  }
  uint32_t rank_id = 0;
  if (!ParallelContext::GetInstance()->global_rank_is_set()) {
    if (!CommManager::GetInstance().GetRankID(world_group, &rank_id)) {
      MS_LOG(EXCEPTION) << "Get rank id failed";
    }
    global_rank = UintToInt(rank_id);
    MS_LOG(INFO) << "Get global rank from the communication framework, the global rank is " << global_rank;
  }
  if (!InitDevice(device_num, global_rank, backend)) {
    MS_LOG(ERROR) << "Init device failed";
    return FAILED;
  }
  MS_LOG(INFO) << "The parallel context: dev num: " << device_num << ", global rank: " << global_rank
               << ", backend: " << backend << ", mirror_mean: " << ParallelContext::GetInstance()->mirror_mean()
               << ", cast_before_mirror: " << ParallelContext::GetInstance()->cast_before_mirror();
  return SUCCESS;
}

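// Pass entry point: in semi_auto_parallel/auto_parallel mode it extracts
// operator information, initializes reshape layouts, covers parameter slice
// shapes, and inserts the communication operators. It runs only once per
// graph (SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY).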
bool StepParallel(const FuncGraphPtr &root, const opt::OptimizerPtr &optimizer) {
  MS_EXCEPTION_IF_NULL(root);
  MS_EXCEPTION_IF_NULL(optimizer);
  MS_EXCEPTION_IF_NULL(ParallelContext::GetInstance());
  std::string parallel_mode = ParallelContext::GetInstance()->parallel_mode();
  // assume no change to graph
  bool changes = false;
  // control whether to use the model_parallel mode
  if (!root->has_flag(AUTO_PARALLEL) || ((parallel_mode != AUTO_PARALLEL) && (parallel_mode != SEMI_AUTO_PARALLEL)) ||
      (root->has_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY))) {
    if (!root->has_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY)) {
      if (HasStrategy(root)) {
        MS_LOG(INFO) << "Strategies ignored in " << parallel_mode
                     << ", set_strategy() only valid in [semi_]auto_parallel.";
      }
      root->set_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY, true);
    }
    return changes;
  }
  struct timeval start_time, end_time;
  (void)gettimeofday(&start_time, nullptr);
  MS_LOG(INFO) << "Now entering step parallel";
  DumpGraph(root, std::string(STEP_PARALLEL_BEGIN));
  pipeline::ResourceBasePtr res = optimizer->resource();
  MS_EXCEPTION_IF_NULL(res);
  FuncGraphManagerPtr manager = res->manager();
  MS_EXCEPTION_IF_NULL(manager);
  AnfNodePtr ret = root->get_return();
  MS_EXCEPTION_IF_NULL(ret);
  std::vector<AnfNodePtr> all_nodes = DeepScopedGraphSearch(ret);
  std::reverse(all_nodes.begin(), all_nodes.end());
  if (parallel_mode != AUTO_PARALLEL) {
    TOTAL_OPS = 0;
    if (ParallelInit() != SUCCESS) {
      MS_LOG(EXCEPTION) << "Parallel init failed";
    }
    // mark the forward cnodes, parallel only cares about these nodes
    MarkForwardCNode(root);
    if (FindCommunicationOp(all_nodes)) {
      MS_LOG(EXCEPTION) << "The graph contains a communication op";
    }
    // extract shape and strategy, set operator_info
    ExtractInformation(all_nodes);
    ReshapeInit(all_nodes);
  }
  // save strategy as checkpoint for multi-train
  if (StrategyCheckpoint::GetInstance().SaveCheckPointOn()) {
    CheckpointStrategy(root);
  }
  HandleSymbolicKeyInstance(root, all_nodes);
  // cover the parameter slice shapes
  CoverSliceShape(root);
  // set the shape for optimizer's clone tensor
  SetClonedTensorShapeForOptimizer(root);
  // ForwardCommunication BackwardCommunication TensorRedistribution
  ParallelCommunication(root, all_nodes, manager);
  DumpGraph(root, std::string(STEP_PARALLEL_END));
  // step parallel only runs once
  root->set_flag(SEMI_AUTO_PARALLEL_RUN_ONCE_ONLY, true);
  res->results()[pipeline::kStepParallelGraph] = root;
  // in auto parallel mode, no need to check whether strategies are set
  root->set_flag(CHECK_SET_STRATEGY_VALID_ONCE_ONLY, true);
  (void)gettimeofday(&end_time, nullptr);
  uint64_t time = kUSecondInSecond * static_cast<uint64_t>(end_time.tv_sec - start_time.tv_sec);
  time += static_cast<uint64_t>(end_time.tv_usec - start_time.tv_usec);
  MS_LOG(INFO) << "Now leaving step parallel, used time: " << time << " us";
  return changes;
}

// Needed by rec_parser
std::vector<std::string> ExtractInputsTensorName(const CNodePtr &node) {
  std::vector<std::string> name_inputs;
  std::vector<AnfNodePtr> all_inputs = node->inputs();
  std::vector<AnfNodePtr> node_inputs{all_inputs.begin() + 1, all_inputs.end()};
  std::string node_id = node->UniqueId();
  name_inputs.push_back(node_id);
  for (auto &input : node_inputs) {
    std::string name = input->UniqueId();
    name_inputs.push_back(name);
  }
  return name_inputs;
}
}  // namespace parallel
}  // namespace mindspore