You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

kernel_graph.cc 50 kB

adapte to remove inline merge me commit for remove inline deal witch multiple cases of switch in ConstructKernelGraph deal with switch and call cases in ConstructKernelGraph fix bug and rebase master ConstructKernelGraph adapte to remove inline fix InsertMultipleAssignToGraph bug add graph input to new graph which is created for switch input replace CreateNewParameterFromCNode to NewParameter in order to set new parameter's abstract and kernel_info avoids create a new switch repeatedly when the cnode is a call switch without real input null pointer check update frontend code Revert "update frontend code" This reverts commit ce1f600d1e9b4b47d9b81122f981bbbe505dd250. update frontend code PR_2948 fix bug of CheckLabalIndex handle switch_layer in ConstructKernelGraph add attr for assign node to avoid erasing by cse pass cherry-pick ms commit[59b35f690ddcc94ff35a4f4eaf3816121b32235b]:temporary avoid list getitem problem rebase master Revert "cherry-pick ms commit[59b35f690ddcc94ff35a4f4eaf3816121b32235b]:temporary avoid list getitem problem" This reverts commit 74c258f94260ca0769a1ef69c6ef8e831c301dbf. Revert "handle switch_layer in ConstructKernelGraph" This reverts commit cb5367f02d69facbca8d39e9234c501608aee27f. Revert "update frontend code PR_2948" This reverts commit 234ac583400a96a8ddd641f7a722e1ccd5e056c6. Revert "merge me commit for remove inline" This reverts commit 55c0ebd42b6699c7686f5ce585e745f87dd42280. fix diff after rebase master doing remove inline in me overwrite FindNodePrimitive Revert "doing remove inline in me" This reverts commit b42e893125bc624d323e855ac6ae615333c06e65.
5 years ago
5 years ago
5 years ago
adapte to remove inline merge me commit for remove inline deal witch multiple cases of switch in ConstructKernelGraph deal with switch and call cases in ConstructKernelGraph fix bug and rebase master ConstructKernelGraph adapte to remove inline fix InsertMultipleAssignToGraph bug add graph input to new graph which is created for switch input replace CreateNewParameterFromCNode to NewParameter in order to set new parameter's abstract and kernel_info avoids create a new switch repeatedly when the cnode is a call switch without real input null pointer check update frontend code Revert "update frontend code" This reverts commit ce1f600d1e9b4b47d9b81122f981bbbe505dd250. update frontend code PR_2948 fix bug of CheckLabalIndex handle switch_layer in ConstructKernelGraph add attr for assign node to avoid erasing by cse pass cherry-pick ms commit[59b35f690ddcc94ff35a4f4eaf3816121b32235b]:temporary avoid list getitem problem rebase master Revert "cherry-pick ms commit[59b35f690ddcc94ff35a4f4eaf3816121b32235b]:temporary avoid list getitem problem" This reverts commit 74c258f94260ca0769a1ef69c6ef8e831c301dbf. Revert "handle switch_layer in ConstructKernelGraph" This reverts commit cb5367f02d69facbca8d39e9234c501608aee27f. Revert "update frontend code PR_2948" This reverts commit 234ac583400a96a8ddd641f7a722e1ccd5e056c6. Revert "merge me commit for remove inline" This reverts commit 55c0ebd42b6699c7686f5ce585e745f87dd42280. fix diff after rebase master doing remove inline in me overwrite FindNodePrimitive Revert "doing remove inline in me" This reverts commit b42e893125bc624d323e855ac6ae615333c06e65.
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
adapte to remove inline merge me commit for remove inline deal witch multiple cases of switch in ConstructKernelGraph deal with switch and call cases in ConstructKernelGraph fix bug and rebase master ConstructKernelGraph adapte to remove inline fix InsertMultipleAssignToGraph bug add graph input to new graph which is created for switch input replace CreateNewParameterFromCNode to NewParameter in order to set new parameter's abstract and kernel_info avoids create a new switch repeatedly when the cnode is a call switch without real input null pointer check update frontend code Revert "update frontend code" This reverts commit ce1f600d1e9b4b47d9b81122f981bbbe505dd250. update frontend code PR_2948 fix bug of CheckLabalIndex handle switch_layer in ConstructKernelGraph add attr for assign node to avoid erasing by cse pass cherry-pick ms commit[59b35f690ddcc94ff35a4f4eaf3816121b32235b]:temporary avoid list getitem problem rebase master Revert "cherry-pick ms commit[59b35f690ddcc94ff35a4f4eaf3816121b32235b]:temporary avoid list getitem problem" This reverts commit 74c258f94260ca0769a1ef69c6ef8e831c301dbf. Revert "handle switch_layer in ConstructKernelGraph" This reverts commit cb5367f02d69facbca8d39e9234c501608aee27f. Revert "update frontend code PR_2948" This reverts commit 234ac583400a96a8ddd641f7a722e1ccd5e056c6. Revert "merge me commit for remove inline" This reverts commit 55c0ebd42b6699c7686f5ce585e745f87dd42280. fix diff after rebase master doing remove inline in me overwrite FindNodePrimitive Revert "doing remove inline in me" This reverts commit b42e893125bc624d323e855ac6ae615333c06e65.
5 years ago
adapte to remove inline merge me commit for remove inline deal witch multiple cases of switch in ConstructKernelGraph deal with switch and call cases in ConstructKernelGraph fix bug and rebase master ConstructKernelGraph adapte to remove inline fix InsertMultipleAssignToGraph bug add graph input to new graph which is created for switch input replace CreateNewParameterFromCNode to NewParameter in order to set new parameter's abstract and kernel_info avoids create a new switch repeatedly when the cnode is a call switch without real input null pointer check update frontend code Revert "update frontend code" This reverts commit ce1f600d1e9b4b47d9b81122f981bbbe505dd250. update frontend code PR_2948 fix bug of CheckLabalIndex handle switch_layer in ConstructKernelGraph add attr for assign node to avoid erasing by cse pass cherry-pick ms commit[59b35f690ddcc94ff35a4f4eaf3816121b32235b]:temporary avoid list getitem problem rebase master Revert "cherry-pick ms commit[59b35f690ddcc94ff35a4f4eaf3816121b32235b]:temporary avoid list getitem problem" This reverts commit 74c258f94260ca0769a1ef69c6ef8e831c301dbf. Revert "handle switch_layer in ConstructKernelGraph" This reverts commit cb5367f02d69facbca8d39e9234c501608aee27f. Revert "update frontend code PR_2948" This reverts commit 234ac583400a96a8ddd641f7a722e1ccd5e056c6. Revert "merge me commit for remove inline" This reverts commit 55c0ebd42b6699c7686f5ce585e745f87dd42280. fix diff after rebase master doing remove inline in me overwrite FindNodePrimitive Revert "doing remove inline in me" This reverts commit b42e893125bc624d323e855ac6ae615333c06e65.
5 years ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263
  1. /**
  2. * Copyright 2019 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "backend/session/kernel_graph.h"
  17. #include <algorithm>
  18. #include <queue>
  19. #include <unordered_set>
  20. #include <set>
  21. #include "base/core_ops.h"
  22. #include "ir/param_info.h"
  23. #include "backend/session/anf_runtime_algorithm.h"
  24. #include "runtime/device/kernel_info.h"
  25. #include "backend/kernel_compiler/kernel_build_info.h"
  26. #include "runtime/device/kernel_runtime_manager.h"
  27. #include "backend/kernel_compiler/common_utils.h"
  28. namespace mindspore {
  29. namespace session {
  30. namespace {
// Attribute key: marks whether a node's output is a feature map.
constexpr auto kIsFeatureMapOutput = "IsFeatureMapOutput";
// Attribute key: records which input indices of a node are feature maps.
constexpr auto kIsFeatureMapInputList = "IsFeatureMapInputList";
// Rank of a 5-D tensor layout (e.g. NCDHW).
constexpr size_t k5dDims = 5;
// Kernel names whose first input's feature-map flag may need to be reset
// (see KernelGraph::ResetAssignInputFeaatureMapFlag).
const std::set<std::string> kOpAssignKernelNameList = {prim::kPrimAssign->name(), prim::kPrimAssignAdd->name(),
                                                       prim::kPrimAssignSub->name()};
  36. void PushNoVisitedNode(const AnfNodePtr &node, std::queue<AnfNodePtr> *que,
  37. std::unordered_set<AnfNodePtr> *visited_nodes) {
  38. MS_EXCEPTION_IF_NULL(node);
  39. MS_EXCEPTION_IF_NULL(que);
  40. MS_EXCEPTION_IF_NULL(visited_nodes);
  41. if (visited_nodes->find(node) == visited_nodes->end()) {
  42. que->push(node);
  43. (void)visited_nodes->insert(node);
  44. MS_LOG(DEBUG) << "Push que:" << node->DebugString();
  45. }
  46. }
// Resolve a (possibly nested) Call node to the real output nodes it finally
// produces: follows TupleGetItem/MakeTuple wrappers, then recurses through
// the output of every child graph the call/switch may dispatch to.
std::vector<AnfNodePtr> GetCallRealOutputs(const AnfNodePtr &call_node) {
  auto item_with_index =
    AnfAlgo::VisitKernelWithReturnType(call_node, 0, false, {prim::kPrimTupleGetItem, prim::kPrimMakeTuple});
  AnfNodePtr node = item_with_index.first;
  MS_EXCEPTION_IF_NULL(node);
  if (AnfAlgo::CheckPrimitiveType(node, prim::kPrimMakeTuple)) {
    auto outputs = AnfAlgo::GetAllOutput(node);
    std::set<AnfNodePtr> memo;
    std::vector<AnfNodePtr> new_output;
    // de-duplicate the flattened outputs while keeping first-seen order
    for (auto &output : outputs) {
      if (memo.find(output) != memo.end()) {
        continue;
      }
      memo.insert(output);
      new_output.push_back(output);
    }
    // a MakeTuple wrapping a single Call is unwrapped and resolved further
    if (new_output.size() == 1 && AnfAlgo::CheckPrimitiveType(new_output[0], prim::kPrimCall)) {
      node = new_output[0];
    }
  }
  if (!AnfAlgo::CheckPrimitiveType(node, prim::kPrimCall)) {
    return {node};
  }
  // for a Call node, the real outputs are the outputs of each callee graph
  std::vector<AnfNodePtr> real_inputs;
  auto child_graphs = AnfAlgo::GetCallSwitchKernelGraph(node->cast<CNodePtr>());
  for (const auto &child_graph : child_graphs) {
    auto real_input = child_graph->output();
    auto child_real_inputs = GetCallRealOutputs(real_input);
    std::copy(child_real_inputs.begin(), child_real_inputs.end(), std::back_inserter(real_inputs));
  }
  return real_inputs;
}
  79. bool IsSameLabel(const CNodePtr &left, const CNodePtr &right) {
  80. if (left == right) {
  81. return true;
  82. }
  83. if (left == nullptr || right == nullptr) {
  84. return false;
  85. }
  86. if (!IsPrimitiveCNode(left, GetCNodePrimitive(right))) {
  87. return false;
  88. }
  89. if (AnfAlgo::HasNodeAttr(kAttrLabelIndex, left) && AnfAlgo::HasNodeAttr(kAttrLabelIndex, right)) {
  90. return AnfAlgo::GetNodeAttr<uint32_t>(left, kAttrLabelIndex) ==
  91. AnfAlgo::GetNodeAttr<uint32_t>(right, kAttrLabelIndex);
  92. }
  93. return false;
  94. }
  95. void SyncDeviceInfoToValueNode(const ValueNodePtr &value_node, std::vector<std::string> *device_formats,
  96. std::vector<TypeId> *device_types) {
  97. MS_EXCEPTION_IF_NULL(value_node);
  98. MS_EXCEPTION_IF_NULL(device_formats);
  99. MS_EXCEPTION_IF_NULL(device_types);
  100. ValuePtr value = value_node->value();
  101. std::vector<tensor::TensorPtr> tensors;
  102. TensorValueToTensor(value, &tensors);
  103. if (!tensors.empty()) {
  104. if (tensors.size() != AnfAlgo::GetOutputTensorNum(value_node)) {
  105. MS_LOG(EXCEPTION) << "The size of tensors converted from value [" << tensors.size()
  106. << "] is not equal to output size of value node [" << AnfAlgo::GetOutputTensorNum(value_node)
  107. << "]";
  108. }
  109. device_formats->clear();
  110. device_types->clear();
  111. for (const auto &tensor : tensors) {
  112. MS_EXCEPTION_IF_NULL(tensor);
  113. auto device_sync = tensor->device_address();
  114. if (device_sync != nullptr) {
  115. auto device_address = std::dynamic_pointer_cast<device::DeviceAddress>(device_sync);
  116. MS_EXCEPTION_IF_NULL(device_address);
  117. device_formats->emplace_back(device_address->format());
  118. device_types->emplace_back(device_address->type_id());
  119. continue;
  120. }
  121. device_formats->emplace_back(kOpFormat_DEFAULT);
  122. device_types->emplace_back(kTypeUnknown);
  123. }
  124. }
  125. }
  126. } // namespace
  127. AnfNodePtr KernelGraph::MakeValueNode(const AnfNodePtr &node) {
  128. auto value_node = node->cast<ValueNodePtr>();
  129. if (value_node == nullptr) {
  130. return nullptr;
  131. }
  132. ValueNodePtr new_value_node = std::make_shared<ValueNode>(value_node->value());
  133. new_value_node->set_abstract(value_node->abstract());
  134. this->SetKernelInfoForNode(new_value_node);
  135. return new_value_node;
  136. }
  137. std::vector<AnfNodePtr> KernelGraph::outputs() const {
  138. auto graph_output = output();
  139. if (IsPrimitiveCNode(graph_output, prim::kPrimMakeTuple)) {
  140. auto make_tuple = output()->cast<CNodePtr>();
  141. MS_EXCEPTION_IF_NULL(make_tuple);
  142. auto &inputs = make_tuple->inputs();
  143. return std::vector<AnfNodePtr>(inputs.begin() + 1, inputs.end());
  144. }
  145. return std::vector<AnfNodePtr>(1, graph_output);
  146. }
  147. void KernelGraph::VisitNodeDescendants(const AnfNodePtr &node, std::queue<AnfNodePtr> *visit_queue,
  148. std::unordered_set<AnfNodePtr> *visited_nodes) {
  149. MS_EXCEPTION_IF_NULL(visit_queue);
  150. MS_EXCEPTION_IF_NULL(visited_nodes);
  151. auto it = node_output_edges_.find(node);
  152. if (it == node_output_edges_.end()) {
  153. // value node and parameter has no input,no need to print log
  154. if (node->isa<CNode>()) {
  155. MS_LOG(DEBUG) << "Can not find node [" << node->DebugString() << "]";
  156. }
  157. return;
  158. }
  159. // visit all reduce node first, then other nodes
  160. std::vector<AnfNodePtr> active_nodes;
  161. for (const auto &output_edge : it->second) {
  162. auto next_node = output_edge.first;
  163. MS_EXCEPTION_IF_NULL(next_node);
  164. if (node_input_num_.find(next_node) == node_input_num_.end()) {
  165. MS_LOG(EXCEPTION) << "Can't find node[" << next_node->DebugString() << "]";
  166. }
  167. MS_LOG(DEBUG) << "Decrease input:" << next_node->DebugString() << ",node:" << node->DebugString()
  168. << ",num: " << node_input_num_[next_node] << ",decrease num:" << output_edge.second;
  169. if (node_input_num_[next_node] < output_edge.second) {
  170. MS_LOG(DEBUG) << "Input node:" << next_node->DebugString() << ",node_output_num" << node_input_num_[next_node]
  171. << ",depend edge:" << output_edge.second;
  172. continue;
  173. }
  174. node_input_num_[next_node] = node_input_num_[next_node] - output_edge.second;
  175. // allreduce first
  176. if (node_input_num_[next_node] == 0 && visited_nodes->find(next_node) == visited_nodes->end()) {
  177. (void)visited_nodes->insert(next_node);
  178. if (AnfAlgo::IsCommunicationOp(next_node)) {
  179. MS_LOG(DEBUG) << "Visit node:" << next_node->DebugString();
  180. visit_queue->push(next_node);
  181. } else {
  182. active_nodes.emplace_back(next_node);
  183. }
  184. }
  185. }
  186. for (auto &node : active_nodes) {
  187. MS_EXCEPTION_IF_NULL(node);
  188. MS_LOG(DEBUG) << "Visit node:" << node->DebugString();
  189. visit_queue->push(node);
  190. }
  191. }
// Compute execution_order_: a topological order over the graph's real kernels
// in which the descendants of the most recent communication (all-reduce) op
// are scheduled eagerly, so consecutive communication ops can be grouped.
// Throws (via CheckLoop) if a cycle keeps some node's input count non-zero.
void KernelGraph::SetExecOrderByDefault() {
  std::queue<AnfNodePtr> seed_nodes;
  // seed_nodes receives the zero-input roots; also rebuilds the edge maps
  UpdateNodeEdgeList(&seed_nodes);
  execution_order_.clear();
  std::unordered_set<AnfNodePtr> visited_nodes;
  std::queue<AnfNodePtr> zero_input_nodes;
  AnfNodePtr last_communication_node = nullptr;
  std::queue<AnfNodePtr> communication_descendants;
  while (!seed_nodes.empty() || last_communication_node != nullptr) {
    // seed nodes first, then visit last all reduce node descendant
    if (seed_nodes.empty()) {
      VisitNodeDescendants(last_communication_node, &communication_descendants, &visited_nodes);
      last_communication_node = nullptr;
    } else {
      zero_input_nodes.push(seed_nodes.front());
      seed_nodes.pop();
    }
    // all reduce node descendant first, then common queue
    while (!zero_input_nodes.empty() || !communication_descendants.empty()) {
      AnfNodePtr node = nullptr;
      bool is_communication_descendant = false;
      if (communication_descendants.empty()) {
        node = zero_input_nodes.front();
        zero_input_nodes.pop();
      } else {
        node = communication_descendants.front();
        communication_descendants.pop();
        is_communication_descendant = true;
      }
      // add execute node
      MS_EXCEPTION_IF_NULL(node);
      if (node->isa<CNode>() && AnfAlgo::IsRealKernel(node)) {
        execution_order_.push_back(node->cast<CNodePtr>());
      }
      // for all reduce node, visit last all reduce node descendant
      if (AnfAlgo::IsCommunicationOp(node)) {
        if (last_communication_node != nullptr) {
          VisitNodeDescendants(last_communication_node, &communication_descendants, &visited_nodes);
        }
        last_communication_node = node;
      } else if (is_communication_descendant) {
        VisitNodeDescendants(node, &communication_descendants, &visited_nodes);
      } else {
        VisitNodeDescendants(node, &zero_input_nodes, &visited_nodes);
      }
    }
  }
  CheckLoop();
  // resort start label / end goto
  execution_order_ = SortStartLabelAndEndGoto();
}
  243. std::vector<CNodePtr> KernelGraph::SortStartLabelAndEndGoto() {
  244. std::vector<CNodePtr> re_order;
  245. if (start_label_ != nullptr) {
  246. re_order.push_back(start_label_);
  247. }
  248. for (auto &node : execution_order_) {
  249. if (node == start_label_ || node == end_goto_) {
  250. continue;
  251. }
  252. if (IsSameLabel(node, end_goto_)) {
  253. end_goto_ = node;
  254. MS_LOG(INFO) << "Replace end_goto_ in kernel graph:" << graph_id();
  255. continue;
  256. }
  257. if (IsSameLabel(node, start_label_)) {
  258. start_label_ = node;
  259. MS_LOG(INFO) << "Replace start_label_ in kernel graph:" << graph_id();
  260. continue;
  261. }
  262. re_order.push_back(node);
  263. }
  264. if (end_goto_ != nullptr) {
  265. re_order.push_back(end_goto_);
  266. }
  267. return re_order;
  268. }
// DFS helper for loop detection. Walks input edges from `node`, restricted to
// inputs whose pending-input counter is non-zero, recording the DFS tree in
// edge_to_. On hitting an already-visited node it reconstructs the cycle via
// edge_to_, logs each node on it, decrements their input counters, bumps
// *loop_num, and returns; a non-zero *loop_num short-circuits recursion.
void KernelGraph::GetLoopNodesByDFS(AnfNodePtr node, uint32_t *loop_num) {
  MS_EXCEPTION_IF_NULL(node);
  auto node_input_it = node_input_edges_.find(node);
  if (node_input_it == node_input_edges_.end()) {
    MS_LOG(DEBUG) << "Node [" << node->DebugString() << "] don't have input edges.";
    return;
  }
  if (*loop_num != 0) {
    // a loop was already reported by a deeper recursive call
    return;
  }
  visited_nodes_.insert(node);
  for (auto input_edge : node_input_edges_[node]) {
    size_t input_num = node_input_num_[input_edge.first];
    if (input_num == 0) {
      // this input's counter drained during visiting; it cannot be on a loop
      continue;
    }
    if (find(visited_nodes_.begin(), visited_nodes_.end(), input_edge.first) == visited_nodes_.end()) {
      MS_EXCEPTION_IF_NULL(input_edge.first);
      edge_to_[input_edge.first] = node;
      GetLoopNodesByDFS(input_edge.first, loop_num);
    } else {
      // back edge found: walk edge_to_ from `node` back to the loop head
      AnfNodePtr node_iter = node;
      MS_EXCEPTION_IF_NULL(node_iter);
      MS_LOG(INFO) << "Print loop nodes start:";
      for (; node_iter != input_edge.first && node_iter != nullptr; node_iter = edge_to_[node_iter]) {
        loop_nodes_.push(node_iter);
        node_input_num_[node_iter]--;
        MS_LOG(INFO) << "Get loop node:" << node_iter->DebugString();
      }
      if (node_iter != nullptr) {
        loop_nodes_.push(node_iter);
        loop_nodes_.push(node);
        (*loop_num)++;
        node_input_num_[node_iter]--;
        MS_LOG(INFO) << "Get loop node:" << node_iter->DebugString();
        MS_LOG(INFO) << "Get loop node:" << node->DebugString();
        MS_LOG(INFO) << "Print loop nodes end, Loop num:" << *loop_num;
        // the collected loop nodes have been logged; discard them
        while (!loop_nodes_.empty()) {
          loop_nodes_.pop();
        }
        return;
      }
    }
  }
}
// Count the loops among `none_zero_nodes` (nodes whose pending-input counter
// stayed non-zero after topological visiting) by running a DFS from each one
// that is still pending.
// NOTE(review): the map is taken by value, which copies it on every call —
// the signature presumably matches the header declaration; confirm before
// changing it to a const reference.
uint32_t KernelGraph::GetLoopNum(std::map<AnfNodePtr, size_t> none_zero_nodes) {
  uint32_t loop_num = 0;
  for (auto iter = none_zero_nodes.begin(); iter != none_zero_nodes.end(); iter++) {
    auto node = iter->first;
    MS_EXCEPTION_IF_NULL(node);
    if (node_input_num_[node] == 0) {
      // counter drained by an earlier DFS; node is no longer suspicious
      continue;
    }
    edge_to_.clear();
    visited_nodes_.clear();
    GetLoopNodesByDFS(node, &loop_num);
  }
  return loop_num;
}
// Sanity check after topological visiting: every node's pending-input counter
// must have reached zero. Nodes with non-zero counters are logged with their
// inputs, counted for loops, and an exception is thrown.
void KernelGraph::CheckLoop() {
  std::map<AnfNodePtr, size_t> none_zero_nodes;
  if (node_input_edges_.size() != node_input_num_.size()) {
    MS_LOG(EXCEPTION) << "node_input_edges_ size :" << node_input_edges_.size()
                      << "not equal to node_input_num_ size:" << node_input_num_.size();
  }
  for (auto &it : node_input_num_) {
    MS_EXCEPTION_IF_NULL(it.first);
    string str;
    auto node_input_it = node_input_edges_.find(it.first);
    if (node_input_it == node_input_edges_.end()) {
      MS_LOG(EXCEPTION) << "Can't find node [" << it.first->DebugString() << "]";
    }
    if (it.second != 0) {
      // collect this node's inputs for the diagnostic message
      for (const auto &input_edge : node_input_edges_[it.first]) {
        MS_EXCEPTION_IF_NULL(input_edge.first);
        str = str.append(input_edge.first->DebugString()).append("|");
      }
      MS_LOG(WARNING) << "Node:" << it.first->DebugString() << ",inputs:" << str << ",input num:" << it.second;
      none_zero_nodes[it.first] = it.second;
    }
  }
  // if don't consider control depend and loop exit,a exception will be throw
  if (!none_zero_nodes.empty()) {
    MS_LOG(WARNING) << "Nums of loop:" << GetLoopNum(none_zero_nodes);
    MS_LOG(EXCEPTION) << "Nodes have loop, left node num:" << none_zero_nodes.size();
  }
}
  356. void ReSetParameterValueNodeFormatAndType(const AnfNodePtr &node, const std::string &format) {
  357. MS_EXCEPTION_IF_NULL(node);
  358. auto kernel_build_info_builder = std::make_shared<kernel::KernelBuildInfo::KernelBuildInfoBuilder>();
  359. MS_EXCEPTION_IF_NULL(kernel_build_info_builder);
  360. kernel_build_info_builder->SetOutputsFormat({format});
  361. kernel_build_info_builder->SetOutputsDeviceType({AnfAlgo::GetOutputInferDataType(node, 0)});
  362. AnfAlgo::SetSelectKernelBuildInfo(kernel_build_info_builder->Build(), node.get());
  363. }
  364. void KernelGraph::ResetInFormat(const AnfNodePtr &node, const std::string &format) const {
  365. MS_EXCEPTION_IF_NULL(node);
  366. for (size_t i = 0; i < AnfAlgo::GetInputTensorNum(node); i++) {
  367. auto in_node = AnfAlgo::GetInputNode(node->cast<CNodePtr>(), i);
  368. MS_EXCEPTION_IF_NULL(in_node);
  369. if ((in_node->isa<Parameter>() || in_node->isa<ValueNode>()) &&
  370. AnfAlgo::GetOutputInferShape(in_node, 0).size() == k5dDims) {
  371. ReSetParameterValueNodeFormatAndType(in_node, format);
  372. }
  373. }
  374. }
// Create a CNode in this graph from `inputs`, giving it a None abstract, a
// fresh kernel info, and this graph's id. Cast nodes created here are tagged
// as non-backend casts; an "io_format" attr of NCDHW propagates that format
// to 5-D parameter/value inputs.
CNodePtr KernelGraph::NewCNode(const std::vector<AnfNodePtr> &inputs) {
  auto cnode = FuncGraph::NewCNode(inputs);
  MS_EXCEPTION_IF_NULL(cnode);
  cnode->set_abstract(std::make_shared<abstract::AbstractNone>());
  // graph-kernel nodes need kernel info created for their inner nodes first
  CreateKernelInfoFromNewParameter(cnode);
  if (AnfAlgo::GetCNodeName(cnode) == prim::kPrimCast->name()) {
    AnfAlgo::SetNodeAttr(kIsBackendCast, MakeValue(false), cnode);
  }
  SetKernelInfoForNode(cnode);
  if (AnfAlgo::HasNodeAttr("io_format", cnode)) {
    auto attr = AnfAlgo::GetNodeAttr<std::string>(cnode, "io_format");
    if (attr == kOpFormat_NCDHW) {
      ResetInFormat(cnode, kOpFormat_NCDHW);
    }
  }
  AnfAlgo::SetGraphId(graph_id_, cnode.get());
  return cnode;
}
// For a graph-kernel cnode: ensure every inner node and every input of its
// sub func-graph carries a kernel info, and replace inner tensor value-node
// inputs with value nodes owned by this kernel graph. No-op otherwise.
void KernelGraph::CreateKernelInfoFromNewParameter(const CNodePtr &cnode) {
  if (!AnfAlgo::IsGraphKernel(cnode)) {
    return;
  }
  auto func_graph = AnfAlgo::GetCNodeFuncGraphPtr(cnode);
  MS_EXCEPTION_IF_NULL(func_graph);
  std::vector<AnfNodePtr> node_list;
  std::vector<AnfNodePtr> input_list;
  std::vector<AnfNodePtr> output_list;
  kernel::GetValidKernelNodes(func_graph, &node_list, &input_list, &output_list);
  for (auto &anf_node : node_list) {
    MS_EXCEPTION_IF_NULL(anf_node);
    if (anf_node->kernel_info() == nullptr) {
      anf_node->set_kernel_info(std::make_shared<device::KernelInfo>());
    }
    auto anf_cnode = anf_node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(anf_cnode);
    for (size_t i = 0; i < AnfAlgo::GetInputTensorNum(anf_cnode); ++i) {
      // input(0) is the primitive, so tensor inputs start at index 1
      auto input_node = anf_cnode->input(i + 1);
      MS_EXCEPTION_IF_NULL(input_node);
      if (IsValueNode<tensor::Tensor>(input_node)) {
        // re-create tensor value nodes so they carry this graph's kernel info
        auto new_input_node = MakeValueNode(input_node);
        if (new_input_node != nullptr) {
          anf_cnode->set_input(i + 1, new_input_node);
        }
      }
    }
  }
  for (auto &anf_node : input_list) {
    MS_EXCEPTION_IF_NULL(anf_node);
    if (anf_node->kernel_info() == nullptr) {
      anf_node->set_kernel_info(std::make_shared<device::KernelInfo>());
    }
  }
}
  428. void KernelGraph::ResetAssignInputFeaatureMapFlag(const CNodePtr &cnode) const {
  429. if (kOpAssignKernelNameList.find(AnfAlgo::GetCNodeName(cnode)) == kOpAssignKernelNameList.end()) {
  430. MS_LOG(EXCEPTION) << "Only supported to change the node [Assign , AssignSub, AssignAdd] node's input feature map "
  431. "flag but got the node :"
  432. << cnode->DebugString();
  433. }
  434. auto input_node = AnfAlgo::GetInputNode(cnode, 0);
  435. auto assign_value_node = AnfAlgo::GetInputNode(cnode, 1);
  436. if (AnfAlgo::IsFeatureMapOutput(input_node)) {
  437. return;
  438. }
  439. if (!AnfAlgo::IsFeatureMapOutput(input_node) && AnfAlgo::IsFeatureMapOutput(assign_value_node)) {
  440. auto kernel_info = static_cast<device::KernelInfo *>(input_node->kernel_info());
  441. kernel_info->set_feature_map_flag(true);
  442. }
  443. }
// Attach a fresh KernelInfo to `node` and initialize it:
// - CNode: derive the feature-map flag from its inputs and, for real kernels,
//   record the flag and the feature-map input indices as node attrs.
// - ValueNode: default format / unknown type unless the value's tensors carry
//   device info (synced via SyncDeviceInfoToValueNode).
// - Parameter: feature map iff it is not a weight.
void KernelGraph::SetKernelInfoForNode(const AnfNodePtr &node) const {
  MS_EXCEPTION_IF_NULL(node);
  auto kernel_info = std::make_shared<device::KernelInfo>();
  node->set_kernel_info(kernel_info);
  if (node->isa<CNode>()) {
    if (kOpAssignKernelNameList.find(AnfAlgo::GetCNodeName(node)) != kOpAssignKernelNameList.end()) {
      ResetAssignInputFeaatureMapFlag(node->cast<CNodePtr>());
    }
#if defined(__APPLE__)
    std::vector<int> feature_map_input_indexs;
#else
    std::vector<size_t> feature_map_input_indexs;
#endif
    kernel_info->set_feature_map_flag(false);
    for (size_t index = 0; index < AnfAlgo::GetInputTensorNum(node); ++index) {
      if (AnfAlgo::IsFeatureMapInput(node, index)) {
        kernel_info->set_feature_map_flag(true);
        feature_map_input_indexs.push_back(index);
      }
    }
    if (AnfAlgo::GetInputTensorNum(node) == 0) {
      // input-less nodes (e.g. GetNext) are treated as feature-map producers
      kernel_info->set_feature_map_flag(true);
    }
    if (AnfAlgo::IsRealKernel(node)) {
      // if the node only has the primitive(such as getNext) or the node's input has a feature map input
      // then the node's output is a feature map output
      AnfAlgo::SetNodeAttr(kIsFeatureMapOutput, MakeValue(kernel_info->is_feature_map()), node);
      AnfAlgo::SetNodeAttr(kIsFeatureMapInputList, MakeValue(feature_map_input_indexs), node);
    }
    return;
  }
  auto kernel_build_info_builder = std::make_shared<kernel::KernelBuildInfo::KernelBuildInfoBuilder>();
  // set the format of value_node to DEFAULT_FORMAT
  std::vector<TypeId> types;
  std::vector<std::string> formats = {kOpFormat_DEFAULT};
  if (node->isa<ValueNode>()) {
    kernel_info->set_feature_map_flag(false);
    types.emplace_back(kTypeUnknown);
    auto value_node = node->cast<ValueNodePtr>();
    SyncDeviceInfoToValueNode(value_node, &formats, &types);
  }
  if (node->isa<Parameter>()) {
    auto parameter = node->cast<ParameterPtr>();
    MS_EXCEPTION_IF_NULL(parameter);
    bool is_weight = AnfAlgo ::IsParameterWeight(parameter);
    kernel_info->set_feature_map_flag(!is_weight);
    types.push_back(is_weight ? kTypeUnknown : AnfAlgo::GetOutputInferDataType(parameter, 0));
  }
  // set parameter initaial device data type
  kernel_build_info_builder->SetOutputsFormat(formats);
  kernel_build_info_builder->SetOutputsDeviceType(types);
  AnfAlgo::SetSelectKernelBuildInfo(kernel_build_info_builder->Build(), node.get());
}
  497. CNodePtr KernelGraph::NewCNode(const CNodePtr &cnode) {
  498. MS_EXCEPTION_IF_NULL(cnode);
  499. auto new_cnode = std::make_shared<CNode>(*cnode);
  500. // if a cnode is created not from front,this cnode won't be in map,so when replace it,we shouldn't update map
  501. if (BackendNodeExistInFrontBackendMap(cnode)) {
  502. FrontBackendlMapUpdate(cnode, new_cnode);
  503. }
  504. AnfAlgo::SetGraphId(graph_id_, cnode.get());
  505. return new_cnode;
  506. }
// Creates a new parameter in this graph. When `parameter` is non-null its name is
// copied over, and for weight parameters the default value as well; a null argument
// produces a fresh parameter whose abstract is AbstractNone.
ParameterPtr KernelGraph::NewParameter(const ParameterPtr &parameter) {
  // nullptr means "create from scratch": use an AbstractNone placeholder abstract.
  auto abstract = parameter == nullptr ? std::make_shared<abstract::AbstractNone>() : parameter->abstract();
  auto new_parameter = NewParameter(abstract);
  MS_EXCEPTION_IF_NULL(new_parameter);
  // if don't use default parameter = nullptr, it means we create a new parameter from an old parameter
  if (parameter != nullptr) {
    new_parameter->set_name(parameter->name());
    if (AnfAlgo::IsParameterWeight(parameter)) {
      new_parameter->set_default_param(parameter->default_param());
    }
  }
  // create kernel_info for the new parameter
  SetKernelInfoForNode(new_parameter);
  AnfAlgo::SetGraphId(graph_id_, new_parameter.get());
  return new_parameter;
}
  523. ParameterPtr KernelGraph::NewParameter(const abstract::AbstractBasePtr &abstract) {
  524. ParameterPtr new_parameter = add_parameter();
  525. new_parameter->set_abstract(abstract);
  526. MS_EXCEPTION_IF_NULL(new_parameter);
  527. // create kernel_info form new parameter
  528. SetKernelInfoForNode(new_parameter);
  529. AnfAlgo::SetGraphId(graph_id_, new_parameter.get());
  530. return new_parameter;
  531. }
  532. ValueNodePtr KernelGraph::NewValueNode(const ValueNodePtr &value_node) {
  533. MS_EXCEPTION_IF_NULL(value_node);
  534. auto new_value_node = MakeValueNode(value_node)->cast<ValueNodePtr>();
  535. AnfAlgo::SetGraphId(graph_id_, new_value_node.get());
  536. return new_value_node;
  537. }
  538. ValueNodePtr KernelGraph::NewValueNode(const AbstractBasePtr &abstract, const ValuePtr &value) {
  539. MS_EXCEPTION_IF_NULL(abstract);
  540. MS_EXCEPTION_IF_NULL(value);
  541. ValueNodePtr new_value_node = std::make_shared<ValueNode>(value);
  542. new_value_node->set_abstract(abstract);
  543. SetKernelInfoForNode(new_value_node);
  544. AnfAlgo::SetGraphId(graph_id(), new_value_node.get());
  545. return new_value_node;
  546. }
// Recursively expands a (possibly nested) tuple value into a MakeTuple tree of
// value nodes; a non-tuple abstract becomes a single value node registered in the graph.
AnfNodePtr KernelGraph::TransValueNodeTuple(const AbstractBasePtr abstract, const ValuePtr &value) {
  MS_EXCEPTION_IF_NULL(abstract);
  MS_EXCEPTION_IF_NULL(value);
  // Base case: non-tuple value -> one value node, tracked in graph_value_nodes_.
  if (!abstract->isa<abstract::AbstractTuple>()) {
    auto new_value_node = NewValueNode(abstract, value);
    AddValueNodeToGraph(new_value_node);
    return new_value_node;
  }
  auto tuple_abstract = abstract->cast<abstract::AbstractTuplePtr>();
  auto value_tuple = value->cast<ValueTuplePtr>();
  MS_EXCEPTION_IF_NULL(tuple_abstract);
  MS_EXCEPTION_IF_NULL(value_tuple);
  // The abstract and the value must describe tuples of the same arity.
  if (tuple_abstract->size() != value_tuple->size()) {
    MS_LOG(EXCEPTION) << "Abstract size:" << tuple_abstract->size()
                      << " is not equal to value size:" << value_tuple->size();
  }
  // Recurse element-wise and combine the results under a MakeTuple cnode.
  std::vector<AnfNodePtr> make_tuple_inputs = {
    mindspore::NewValueNode(std::make_shared<Primitive>(prim::kPrimMakeTuple->name()))};
  for (size_t index = 0; index < tuple_abstract->size(); ++index) {
    make_tuple_inputs.push_back(TransValueNodeTuple((*tuple_abstract)[index], (*value_tuple)[index]));
  }
  auto make_tuple = NewCNode(make_tuple_inputs);
  make_tuple->set_abstract(tuple_abstract);
  return make_tuple;
}
  572. AnfNodePtr KernelGraph::TransParameterTuple(const AbstractBasePtr &abstract) {
  573. MS_EXCEPTION_IF_NULL(abstract);
  574. if (!abstract->isa<abstract::AbstractTuple>()) {
  575. return NewParameter(abstract);
  576. }
  577. auto tuple_abstract = abstract->cast<abstract::AbstractTuplePtr>();
  578. MS_EXCEPTION_IF_NULL(tuple_abstract);
  579. std::vector<AnfNodePtr> make_tuple_inputs = {
  580. mindspore::NewValueNode(std::make_shared<Primitive>(prim::kPrimMakeTuple->name()))};
  581. for (size_t index = 0; index < tuple_abstract->size(); ++index) {
  582. make_tuple_inputs.push_back(TransParameterTuple((*tuple_abstract)[index]));
  583. }
  584. auto make_tuple = NewCNode(make_tuple_inputs);
  585. make_tuple->set_abstract(tuple_abstract);
  586. return make_tuple;
  587. }
// Builds a TupleGetItem(node, output_idx) cnode and copies the selected output's
// inferred type/shape onto it.
AnfNodePtr KernelGraph::CreatTupleGetItemNode(const AnfNodePtr &node, size_t output_idx) {
  // The index operand is a value node holding output_idx with a scalar abstract.
  auto idx = mindspore::NewValueNode(SizeToLong(output_idx));
  MS_EXCEPTION_IF_NULL(idx);
  auto imm = std::make_shared<Int64Imm>(SizeToLong(output_idx));
  auto abstract_scalar = std::make_shared<abstract::AbstractScalar>(imm);
  idx->set_abstract(abstract_scalar);
  AnfNodePtr tuple_getitem = NewCNode({mindspore::NewValueNode(prim::kPrimTupleGetItem), node, idx});
  MS_EXCEPTION_IF_NULL(tuple_getitem);
  tuple_getitem->set_scope(node->scope());
  // Propagate the chosen output's inferred shape and dtype to the getitem node.
  std::vector<size_t> origin_shape = AnfAlgo::GetOutputInferShape(node, output_idx);
  TypeId origin_type = AnfAlgo::GetOutputInferDataType(node, output_idx);
  AnfAlgo::SetOutputInferTypeAndShape({origin_type}, {origin_shape}, tuple_getitem.get());
  return tuple_getitem;
}
  602. AnfNodePtr KernelGraph::TransCNodeTuple(const CNodePtr &node) {
  603. MS_EXCEPTION_IF_NULL(node);
  604. std::vector<TypeId> types;
  605. std::vector<std::vector<size_t>> shapes;
  606. std::vector<AnfNodePtr> make_tuple_inputs_list = {mindspore::NewValueNode(prim::kPrimMakeTuple)};
  607. for (size_t tuple_out_index = 0; tuple_out_index < AnfAlgo::GetOutputTensorNum(node); ++tuple_out_index) {
  608. make_tuple_inputs_list.emplace_back(CreatTupleGetItemNode(node, tuple_out_index));
  609. types.push_back(AnfAlgo::GetOutputInferDataType(node, tuple_out_index));
  610. shapes.emplace_back(AnfAlgo::GetOutputInferShape(node, tuple_out_index));
  611. }
  612. auto make_tuple = NewCNode(make_tuple_inputs_list);
  613. AnfAlgo::SetOutputInferTypeAndShape(types, shapes, make_tuple.get());
  614. return make_tuple;
  615. }
  616. AnfNodePtr KernelGraph::TransTupleToMakeTuple(const AnfNodePtr &node) {
  617. MS_EXCEPTION_IF_NULL(node);
  618. if (!AnfAlgo::IsTupleOutput(node)) {
  619. return node;
  620. }
  621. if (node->isa<Parameter>()) {
  622. return TransParameterTuple(node->abstract());
  623. } else if (node->isa<ValueNode>()) {
  624. auto value_node = node->cast<ValueNodePtr>();
  625. MS_EXCEPTION_IF_NULL(value_node);
  626. auto make_tuple = TransValueNodeTuple(value_node->abstract(), value_node->value());
  627. if (RemoveValueNodeFromGraph(value_node)) {
  628. MS_LOG(WARNING) << "Failed to remove the value_node " << value_node->DebugString();
  629. }
  630. return make_tuple;
  631. } else if (node->isa<CNode>()) {
  632. return TransCNodeTuple(node->cast<CNodePtr>());
  633. }
  634. MS_LOG(EXCEPTION) << "Unexpected node:" << node->DebugString();
  635. }
// Returns the graph's input node list; inputs_ must have been initialized.
const std::vector<AnfNodePtr> &KernelGraph::inputs() const {
  MS_EXCEPTION_IF_NULL(inputs_);
  return *inputs_;
}
// Registers the 1:1 mapping between a front-end anf node and its backend counterpart.
// Raises if the front node is already mapped. A backend node that is already mapped is
// tolerated only when the front cnode's primitive slot holds a CNode (non-primitive
// call-style node); otherwise it raises as well.
void KernelGraph::FrontBackendlMapAdd(const AnfNodePtr &front_anf, const AnfNodePtr &backend_anf) {
  MS_EXCEPTION_IF_NULL(front_anf);
  MS_EXCEPTION_IF_NULL(backend_anf);
  if (front_backend_anf_map_.find(front_anf) != front_backend_anf_map_.end()) {
    MS_LOG(EXCEPTION) << "Anf " << front_anf->DebugString() << " has been exist in the front_backend_anf_map_";
  }
  if (backend_front_anf_map_.find(backend_anf) != backend_front_anf_map_.end()) {
    auto front_node = front_anf->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(front_node);
    // NOTE(review): a duplicated backend node is accepted when the primitive slot is a
    // CNode — presumably the call/switch case; confirm the intent with callers.
    auto attr_input = front_node->input(kAnfPrimitiveIndex);
    if (!attr_input->isa<CNode>()) {
      MS_LOG(EXCEPTION) << "Kernel " << backend_anf->DebugString() << "has been exist in the backend_front_anf_map_";
    }
  }
  front_backend_anf_map_[front_anf] = backend_anf;
  backend_front_anf_map_[backend_anf] = front_anf;
}
// Repoints the front<->backend mapping from old_backend_anf to new_backend_anf,
// keeping the internal-output bookkeeping in sync and erasing the stale backend entry.
// No-ops (with a debug log) when the two nodes are identical or the old node is unmapped.
void KernelGraph::FrontBackendlMapUpdate(const AnfNodePtr &old_backend_anf, const AnfNodePtr &new_backend_anf) {
  MS_EXCEPTION_IF_NULL(old_backend_anf);
  MS_EXCEPTION_IF_NULL(new_backend_anf);
  if (old_backend_anf == new_backend_anf) {
    MS_LOG(DEBUG) << "Old same with new:" << old_backend_anf->DebugString();
    return;
  }
  if (backend_front_anf_map_.find(old_backend_anf) == backend_front_anf_map_.end()) {
    MS_LOG(DEBUG) << "Old_backend_anf " << old_backend_anf->DebugString() << " is not exist in the map";
    return;
  }
  // The two maps must be mutually consistent: the front node recorded for the old
  // backend node has to exist in the forward map too.
  if (front_backend_anf_map_.find(backend_front_anf_map_[old_backend_anf]) == front_backend_anf_map_.end()) {
    MS_LOG(EXCEPTION) << "Anf is not exist in the map ,old " << old_backend_anf->DebugString();
  }
  // Keep the internal-output table pointing at the replacement node.
  if (IsInternalOutput(old_backend_anf)) {
    ReplaceInternalOutput(old_backend_anf, new_backend_anf);
  }
  front_backend_anf_map_[backend_front_anf_map_[old_backend_anf]] = new_backend_anf;
  backend_front_anf_map_[new_backend_anf] = backend_front_anf_map_[old_backend_anf];
  // delete old kernel
  (void)backend_front_anf_map_.erase(old_backend_anf);
}
  679. // get kernel by anf
  680. AnfNodePtr KernelGraph::GetBackendAnfByFrontAnf(const AnfNodePtr &front_anf) {
  681. if (front_backend_anf_map_.find(front_anf) == front_backend_anf_map_.end()) {
  682. return nullptr;
  683. }
  684. return front_backend_anf_map_[front_anf];
  685. }
  686. bool KernelGraph::BackendNodeExistInFrontBackendMap(const AnfNodePtr &backend_anf) {
  687. return backend_front_anf_map_.find(backend_anf) != backend_front_anf_map_.end();
  688. }
  689. ValueNodePtr KernelGraph::GetValueNodeByTensor(const mindspore::tensor::TensorPtr &tensor) {
  690. if (tensor_to_value_node_map_.find(tensor) == tensor_to_value_node_map_.end()) {
  691. return nullptr;
  692. }
  693. return tensor_to_value_node_map_[tensor];
  694. }
// Records (or overwrites) the value node associated with a tensor for later reuse.
void KernelGraph::TensorValueNodeMapAdd(const tensor::TensorPtr &tensor, const ValueNodePtr &value_node) {
  MS_EXCEPTION_IF_NULL(tensor);
  MS_EXCEPTION_IF_NULL(value_node);
  tensor_to_value_node_map_[tensor] = value_node;
}
  700. void KernelGraph::AddDependEdge(const AnfNodePtr &node, const AnfNodePtr &input, size_t depend_edge_num) {
  701. MS_EXCEPTION_IF_NULL(node);
  702. MS_EXCEPTION_IF_NULL(input);
  703. MS_LOG(DEBUG) << "Input:" << input->DebugString() << ", node:" << node->DebugString() << ",num:" << depend_edge_num;
  704. auto output_depend_edge = std::pair<AnfNodePtr, size_t>(node, depend_edge_num);
  705. // add output depend edge of input
  706. auto output_it = node_output_edges_.find(input);
  707. if (output_it == node_output_edges_.end()) {
  708. node_output_edges_[input] = std::vector<std::pair<AnfNodePtr, size_t>>{output_depend_edge};
  709. } else {
  710. output_it->second.push_back(output_depend_edge);
  711. }
  712. // add input depend edge of output
  713. auto input_depend_edge = std::pair<AnfNodePtr, size_t>(input, depend_edge_num);
  714. auto input_it = node_input_edges_.find(node);
  715. if (input_it == node_input_edges_.end()) {
  716. node_input_edges_[node] = std::vector<std::pair<AnfNodePtr, size_t>>{input_depend_edge};
  717. } else {
  718. input_it->second.push_back(input_depend_edge);
  719. }
  720. // add node input depend num
  721. auto depend_it = node_input_num_.find(node);
  722. if (depend_it == node_input_num_.end()) {
  723. node_input_num_[node] = depend_edge_num;
  724. } else {
  725. depend_it->second += depend_edge_num;
  726. }
  727. }
  728. std::vector<AnfNodePtr> KernelGraph::GetOutputNodes(const AnfNodePtr &node) {
  729. MS_EXCEPTION_IF_NULL(node);
  730. auto it = node_output_edges_.find(node);
  731. if (it == node_output_edges_.end()) {
  732. MS_LOG(EXCEPTION) << "Can't find node[" << node->DebugString() << "]";
  733. }
  734. std::vector<AnfNodePtr> output_nodes;
  735. auto trans = [](const std::pair<AnfNodePtr, size_t> &pair) -> AnfNodePtr { return pair.first; };
  736. (void)std::transform(it->second.begin(), it->second.end(), std::back_inserter(output_nodes), trans);
  737. return output_nodes;
  738. }
  739. // update the depend relations of control depend
  740. void KernelGraph::UpdateControlDependRelations(const std::vector<AnfNodePtr> &depends) {
  741. for (const auto &node : depends) {
  742. MS_EXCEPTION_IF_NULL(node);
  743. if (!node->isa<CNode>()) {
  744. return;
  745. }
  746. auto cnode = node->cast<CNodePtr>();
  747. MS_EXCEPTION_IF_NULL(cnode);
  748. if (!AnfAlgo::CheckPrimitiveType(node, prim::kPrimControlDepend)) {
  749. MS_LOG(EXCEPTION) << node->DebugString() << " is not a control depend";
  750. }
  751. auto prior_node = cnode->input(kControlDependPriorIndex);
  752. auto depend_node = cnode->input(kControlDependBehindIndex);
  753. MS_EXCEPTION_IF_NULL(prior_node);
  754. MS_EXCEPTION_IF_NULL(depend_node);
  755. std::vector<AnfNodePtr> prior_nodes = {prior_node};
  756. std::vector<AnfNodePtr> depend_nodes = {depend_node};
  757. int depend_mode = 0;
  758. if (AnfAlgo::HasNodeAttr(kControlDependMode, cnode)) {
  759. depend_mode = AnfAlgo::GetNodeAttr<int64_t>(cnode, kControlDependMode);
  760. }
  761. MS_LOG(DEBUG) << "Prior node[" << prior_node->DebugString() << "], depend node[" << depend_node->DebugString()
  762. << "], depend_mode :" << depend_mode << ".";
  763. if (prior_node->isa<Parameter>() && depend_mode == 1) {
  764. prior_nodes = GetOutputNodes(prior_node);
  765. }
  766. if (depend_node->isa<Parameter>()) {
  767. depend_nodes = depend_mode == 1 ? GetOutputNodes(depend_node) : std::vector<AnfNodePtr>{};
  768. }
  769. std::vector<AnfNodePtr> real_prior_nodes;
  770. std::set<AnfNodePtr> prior_visited;
  771. for (const auto &tmp : prior_nodes) {
  772. AnfAlgo::GetAllFatherRealNode(tmp, &real_prior_nodes, &prior_visited);
  773. }
  774. std::vector<AnfNodePtr> real_depend_nodes;
  775. std::set<AnfNodePtr> depend_visited;
  776. for (const auto &tmp : depend_nodes) {
  777. AnfAlgo::GetAllFatherRealNode(tmp, &real_depend_nodes, &depend_visited);
  778. }
  779. UpdateNodeInputOutputEdges(real_prior_nodes, real_depend_nodes);
  780. }
  781. }
  782. void KernelGraph::UpdateNodeInputOutputEdges(const std::vector<AnfNodePtr> &real_prior_nodes,
  783. const std::vector<AnfNodePtr> &real_depend_nodes) {
  784. for (auto &first_node : real_prior_nodes) {
  785. if (AnfAlgo::CheckPrimitiveType(first_node, prim::kPrimControlDepend)) {
  786. continue;
  787. }
  788. for (auto &second_node : real_depend_nodes) {
  789. if (AnfAlgo::CheckPrimitiveType(second_node, prim::kPrimControlDepend)) {
  790. continue;
  791. }
  792. MS_EXCEPTION_IF_NULL(first_node);
  793. MS_EXCEPTION_IF_NULL(second_node);
  794. MS_LOG(DEBUG) << "Add first node:" << first_node->DebugString() << ",second node:" << second_node->DebugString();
  795. AddDependEdge(second_node, first_node, 1);
  796. }
  797. }
  798. }
// Detects ControlDepend cnodes during the edge-rebuild BFS. Returns true when `node`
// is a ControlDepend: it is marked visited (but not enqueued itself), its inputs get
// zero-weight edges so link relations survive for zero-output detection, and its
// prior/behind operands are enqueued for traversal.
bool KernelGraph::HandleControlDependNode(const AnfNodePtr &node, std::queue<AnfNodePtr> *que,
                                          std::unordered_set<AnfNodePtr> *visited_nodes) {
  MS_EXCEPTION_IF_NULL(node);
  MS_EXCEPTION_IF_NULL(que);
  MS_EXCEPTION_IF_NULL(visited_nodes);
  if (!node->isa<CNode>()) {
    return false;
  }
  auto cnode = node->cast<CNodePtr>();
  MS_EXCEPTION_IF_NULL(cnode);
  if (!AnfAlgo::CheckPrimitiveType(node, prim::kPrimControlDepend)) {
    return false;
  }
  // set the control depend visited but don't push it into the que
  if (visited_nodes->find(node) != visited_nodes->end()) {
    return true;
  }
  (void)visited_nodes->insert(cnode);
  // add a 0 depend num to keep the link relations to prepare for finding zero output nodes
  auto prior_node = cnode->input(kControlDependPriorIndex);
  auto depend_node = cnode->input(kControlDependBehindIndex);
  for (const auto &input : cnode->inputs()) {
    AddDependEdge(node, input, 0);
  }
  PushNoVisitedNode(depend_node, que, visited_nodes);
  PushNoVisitedNode(prior_node, que, visited_nodes);
  return true;
}
// Rebuilds the whole dependency-edge bookkeeping (input/output edge maps and input
// counters) by BFS from the return node. Parameters and value nodes encountered along
// the way are pushed into *seed_nodes; ControlDepend nodes are expanded at the end.
void KernelGraph::UpdateNodeEdgeList(std::queue<AnfNodePtr> *seed_nodes) {
  MS_EXCEPTION_IF_NULL(seed_nodes);
  // Start from a clean slate: all three edge structures are derived state.
  node_output_edges_.clear();
  node_input_num_.clear();
  node_input_edges_.clear();
  std::vector<AnfNodePtr> control_depends;
  std::unordered_set<AnfNodePtr> visited_nodes;
  std::queue<AnfNodePtr> que;
  que.push(get_return());
  while (!que.empty()) {
    auto node = que.front();
    que.pop();
    MS_EXCEPTION_IF_NULL(node);
    // Leaves (parameters / value nodes) seed later scheduling; they have no inputs.
    if (node->isa<Parameter>() || node->isa<ValueNode>()) {
      seed_nodes->push(node);
      continue;
    }
    if (!node->isa<CNode>()) {
      continue;
    }
    auto cnode = node->cast<CNodePtr>();
    MS_EXCEPTION_IF_NULL(cnode);
    // handle data links
    for (const auto &input : cnode->inputs()) {
      size_t depend_edge_num = 1;
      // handle control depend, all inputs of control depend has no depend edge
      if (HandleControlDependNode(input, &que, &visited_nodes)) {
        control_depends.push_back(input);
        depend_edge_num = 0;
      }
      PushNoVisitedNode(input, &que, &visited_nodes);
      AddDependEdge(node, input, depend_edge_num);
    }
  }
  // Translate the collected ControlDepend nodes into explicit edges.
  UpdateControlDependRelations(control_depends);
}
// Registers a value node as belonging to this graph (idempotent set insert).
void KernelGraph::AddValueNodeToGraph(const ValueNodePtr &value_node) { (void)graph_value_nodes_.insert(value_node); }
// True when the (node, output index) pair has a registered ref counterpart.
bool KernelGraph::IsInRefOutputMap(const AnfWithOutIndex &pair) const { return ref_out_in_map_.count(pair) != 0; }
  865. AnfWithOutIndex KernelGraph::GetRefCorrespondOutput(const AnfWithOutIndex &out_pair) const {
  866. if (!IsInRefOutputMap(out_pair)) {
  867. MS_LOG(EXCEPTION) << "Out_pair is not in RefOutputMap";
  868. }
  869. return ref_out_in_map_.at(out_pair);
  870. }
  871. void KernelGraph::AddRefCorrespondPairs(const AnfWithOutIndex &final_pair, const AnfWithOutIndex &origin_pair) {
  872. if (IsInRefOutputMap(final_pair)) {
  873. MS_LOG(EXCEPTION) << "Out_pair is already in RefOutputMap";
  874. }
  875. (void)ref_out_in_map_.insert(std::make_pair(final_pair, origin_pair));
  876. }
  877. bool KernelGraph::RemoveValueNodeFromGraph(const ValueNodePtr &value_node) {
  878. if (graph_value_nodes_.find(value_node) != graph_value_nodes_.end()) {
  879. (void)graph_value_nodes_.erase(value_node);
  880. return true;
  881. }
  882. return false;
  883. }
  884. void KernelGraph::ReplaceGraphInput(const AnfNodePtr &old_parameter, const AnfNodePtr &new_parameter) {
  885. // update graph inputs
  886. MS_EXCEPTION_IF_NULL(old_parameter);
  887. MS_EXCEPTION_IF_NULL(new_parameter);
  888. if (old_parameter == new_parameter) {
  889. return;
  890. }
  891. for (size_t i = 0; i < inputs_->size(); i++) {
  892. if ((*inputs_)[i] == old_parameter) {
  893. MS_LOG(INFO) << "Replace input of graph:" << graph_id_ << ", old graph input: " << old_parameter->DebugString()
  894. << ",new graph input:" << new_parameter->DebugString();
  895. (*inputs_)[i] = new_parameter;
  896. break;
  897. }
  898. }
  899. }
// Replaces old_anf_node with new_anf_node everywhere it is consumed: rewires each
// consumer cnode's inputs (skipping pure control edges), updates the graph input list,
// and fixes the front/backend map. Edge bookkeeping is rebuilt before (to get current
// consumers) and after (to reflect the rewiring).
void KernelGraph::ReplaceNode(NotNull<AnfNodePtr> old_anf_node, NotNull<AnfNodePtr> new_anf_node) {
  MS_EXCEPTION_IF_NULL(inputs_);
  {
    // Refresh node_output_edges_ so the consumer list below is current.
    std::queue<AnfNodePtr> seed_nodes;
    UpdateNodeEdgeList(&seed_nodes);
  }
  auto it = node_output_edges_.find(old_anf_node);
  if (it != node_output_edges_.end()) {
    const auto &outputs = it->second;
    for (auto &output_node : outputs) {
      MS_EXCEPTION_IF_NULL(output_node.first);
      auto output_cnode = output_node.first->cast<CNodePtr>();
      MS_EXCEPTION_IF_NULL(output_cnode);
      auto &output_node_inputs = output_cnode->inputs();
      // don't replace node if it is a control edge => output_node.second == 0
      if (output_node.second == 0) {
        continue;
      }
      // Swap every data-input slot that referenced the old node (slot 0 is the primitive).
      for (size_t i = 1; i < output_node_inputs.size(); i++) {
        if (output_node_inputs[i] == old_anf_node.get()) {
          output_cnode->set_input(i, new_anf_node);
        }
      }
      // NOTE(review): ReplaceGraphInput runs once per consumer here; it is idempotent,
      // but it looks like it was meant to run once per replacement — confirm.
      ReplaceGraphInput(old_anf_node, new_anf_node);
    }
    // update front to backend map
    FrontBackendlMapUpdate(old_anf_node, new_anf_node);
  }
  {
    // Rebuild edges again so the maps reflect the rewired graph.
    std::queue<AnfNodePtr> seed_nodes;
    UpdateNodeEdgeList(&seed_nodes);
  }
}
// Stamps every kernel in the execution order with the graph's stream distinction label.
void KernelGraph::UpdateExecuteKernelStreamLabel() {
  for (auto &kernel : execution_order_) {
    AnfAlgo::SetStreamDistinctionLabel(stream_distinction_label_, kernel.get());
  }
}
// Returns the leaf graphs of the child-graph tree in depth-first order. A graph with
// no children reports itself; otherwise the children's leaf orders are concatenated.
std::vector<std::shared_ptr<KernelGraph>> KernelGraph::GetLeafGraphOrder() {
  std::vector<std::shared_ptr<KernelGraph>> leaf_graph_order;
  if (IsLeafGraph()) {
    leaf_graph_order.push_back(shared_from_this()->cast<KernelGraphPtr>());
  } else {
    for (const auto &child_graph : child_graph_order_) {
      // Children are held weakly; lock must succeed while the session owns the graphs.
      std::shared_ptr<KernelGraph> child_graph_ptr = child_graph.lock();
      MS_EXCEPTION_IF_NULL(child_graph_ptr);
      auto child_leaf_graph_order = child_graph_ptr->GetLeafGraphOrder();
      std::copy(child_leaf_graph_order.begin(), child_leaf_graph_order.end(), std::back_inserter(leaf_graph_order));
    }
  }
  return leaf_graph_order;
}
// A leaf graph has no child graphs (no call/switch targets).
bool KernelGraph::IsLeafGraph() const { return child_graph_order_.empty(); }
  953. std::vector<CNodePtr> KernelGraph::FindNodeByPrimitive(const PrimitivePtr &primitive) const {
  954. std::vector<CNodePtr> result;
  955. for (const auto &anf : execution_order_) {
  956. if (AnfAlgo::CheckPrimitiveType(anf, primitive) && AnfAlgo::GetGraphId(anf.get()) == graph_id_) {
  957. result.push_back(anf->cast<CNodePtr>());
  958. }
  959. }
  960. return result;
  961. }
  962. std::vector<CNodePtr> KernelGraph::FindNodeByPrimitive(const std::vector<PrimitivePtr> &primitive_list) const {
  963. std::vector<CNodePtr> result;
  964. for (const auto &anf : execution_order_) {
  965. for (const auto &primitive : primitive_list) {
  966. if (AnfAlgo::CheckPrimitiveType(anf, primitive) && AnfAlgo::GetGraphId(anf.get()) == graph_id_) {
  967. result.push_back(anf->cast<CNodePtr>());
  968. }
  969. }
  970. }
  971. return result;
  972. }
// Logs the full execution order: index, scoped name, stream labels, and any
// event/label attributes carried by each kernel.
void KernelGraph::PrintGraphExecuteOrder() const {
  MS_LOG(INFO) << "Graph:" << graph_id_ << "execution order";
  for (size_t i = 0; i < execution_order_.size(); i++) {
    CNodePtr cur_cnode_ptr = execution_order_[i];
    MS_EXCEPTION_IF_NULL(cur_cnode_ptr);
    std::string event_str;
    std::string label_str;
    if (AnfAlgo::HasNodeAttr(kAttrEventId, cur_cnode_ptr)) {
      event_str = ", event_id[" + std::to_string(AnfAlgo::GetNodeAttr<uint32_t>(cur_cnode_ptr, kAttrEventId)) + "]";
    }
    if (AnfAlgo::HasNodeAttr(kAttrLabelIndex, cur_cnode_ptr)) {
      label_str = ", label_id[" + std::to_string(AnfAlgo::GetNodeAttr<uint32_t>(cur_cnode_ptr, kAttrLabelIndex)) + "]";
    }
    // A label-switch list overwrites label_str with a bracketed id list, e.g. "label_id[1, 2]".
    if (AnfAlgo::HasNodeAttr(kAttrLabelSwitchList, cur_cnode_ptr)) {
      auto label_list = AnfAlgo::GetNodeAttr<std::vector<uint32_t>>(cur_cnode_ptr, kAttrLabelSwitchList);
      label_str = ", label_id[";
      for (size_t j = 0; j < label_list.size(); ++j) {
        label_str += std::to_string(label_list[j]) + (j + 1 < label_list.size() ? ", " : "]");
      }
    }
    MS_LOG(INFO) << "Index[" << i << "], node name[" << cur_cnode_ptr->fullname_with_scope() << "], logic id["
                 << AnfAlgo::GetStreamDistinctionLabel(cur_cnode_ptr.get()) << "], stream id["
                 << AnfAlgo::GetStreamId(cur_cnode_ptr) << "], node info[" << cur_cnode_ptr->DebugString() << "]"
                 << event_str << label_str;
  }
}
// Records that `node` (at `output_idx`) is an internal output consumed directly by
// `front_node`; unique_target marks a single-consumer relationship. Null arguments
// are tolerated and logged.
void KernelGraph::AddInternalOutput(const AnfNodePtr &front_node, const AnfNodePtr &node, int output_idx,
                                    bool unique_target) {
  if (front_node == nullptr || node == nullptr) {
    MS_LOG(INFO) << "Front node or node is nullptr";
    return;
  }
  MS_LOG(INFO) << "Add internal node " << node->DebugString() << " with front node " << front_node->DebugString();
  front_to_internal_outputs_map_[front_node] = node;
  // For a TupleGetItem front node, index by the element it actually extracts.
  if (AnfAlgo::CheckPrimitiveType(front_node, prim::kPrimTupleGetItem)) {
    output_idx = AnfAlgo::GetTupleGetItemOutIndex(front_node->cast<CNodePtr>());
  }
  internal_outputs_to_front_map_[node][output_idx] = std::pair<AnfNodePtr, bool>(front_node, unique_target);
}
  1012. void KernelGraph::AddInternalOutputTensor(const AnfNodePtr &node, int output_idx, const tensor::TensorPtr &tensor) {
  1013. if (node == nullptr) {
  1014. return;
  1015. }
  1016. internal_outputs_tensor_map_[node][output_idx] = tensor;
  1017. }
  1018. tensor::TensorPtr KernelGraph::GetInternalOutputTensor(const AnfNodePtr &node, int output_idx) {
  1019. if (node == nullptr) {
  1020. return nullptr;
  1021. }
  1022. auto iter = internal_outputs_tensor_map_.find(node);
  1023. if (iter == internal_outputs_tensor_map_.end()) {
  1024. return nullptr;
  1025. }
  1026. auto idx_iter = iter->second.find(output_idx);
  1027. if (idx_iter == iter->second.end()) {
  1028. return nullptr;
  1029. }
  1030. return idx_iter->second;
  1031. }
// Moves internal-output bookkeeping from `node` to `new_node`. With src_output_idx == -1
// every tracked output migrates; otherwise only the given source index moves, remapped
// to dst_output_idx on the new node. No-ops (with an info log) for null, identical, or
// untracked nodes.
void KernelGraph::ReplaceInternalOutput(const AnfNodePtr &node, const AnfNodePtr &new_node, int src_output_idx,
                                        int dst_output_idx) {
  if (new_node == nullptr || node == nullptr) {
    MS_LOG(INFO) << "New node or node is nullptr";
    return;
  }
  if (node == new_node) {
    MS_LOG(INFO) << "New node and node is the same";
    return;
  }
  auto iter = internal_outputs_to_front_map_.find(node);
  if (iter == internal_outputs_to_front_map_.end()) {
    MS_LOG(INFO) << "Node is not internal output";
    return;
  }
  MS_LOG(INFO) << "Replace internal node " << node->DebugString() << " To " << new_node->DebugString();
  auto &front_nodes = iter->second;
  // Move all front nodes to new node mapping
  if (src_output_idx == -1) {
    internal_outputs_to_front_map_[new_node] = front_nodes;
    for (const auto &front_node_iter : front_nodes) {
      front_to_internal_outputs_map_[front_node_iter.second.first] = new_node;
    }
    internal_outputs_to_front_map_.erase(iter);
    return;
  }
  // Move specified front node to new node mapping
  auto front_node_iter = front_nodes.find(src_output_idx);
  if (front_node_iter == front_nodes.end()) {
    MS_LOG(INFO) << "The output " << src_output_idx << " of node " << node->DebugString() << " is not an internal node";
    return;
  }
  auto front_node_pair = front_node_iter->second;
  internal_outputs_to_front_map_[new_node][dst_output_idx] = front_node_pair;
  front_to_internal_outputs_map_[front_node_pair.first] = new_node;
  front_nodes.erase(src_output_idx);
  // Drop the old node's entry entirely once its last tracked output is gone.
  if (front_nodes.empty()) {
    internal_outputs_to_front_map_.erase(iter);
  }
}
  1072. AnfNodePtr KernelGraph::GetInternalOutputByFrontNode(const AnfNodePtr &front_node) const {
  1073. auto iter = front_to_internal_outputs_map_.find(front_node);
  1074. if (iter != front_to_internal_outputs_map_.end()) {
  1075. return iter->second;
  1076. }
  1077. return nullptr;
  1078. }
  1079. bool KernelGraph::IsInternalOutput(const AnfNodePtr &node, int output_idx) const {
  1080. auto front_nodes_iter = internal_outputs_to_front_map_.find(node);
  1081. if (front_nodes_iter == internal_outputs_to_front_map_.end()) {
  1082. return false;
  1083. }
  1084. if (output_idx == -1) {
  1085. return true;
  1086. }
  1087. auto &front_nodes = front_nodes_iter->second;
  1088. if (front_nodes.find(output_idx) == front_nodes.end()) {
  1089. return false;
  1090. }
  1091. return true;
  1092. }
  1093. bool KernelGraph::IsUniqueTargetInternalOutput(const AnfNodePtr &node, int output_idx) const {
  1094. auto front_nodes_iter = internal_outputs_to_front_map_.find(node);
  1095. if (front_nodes_iter == internal_outputs_to_front_map_.end()) {
  1096. return false;
  1097. }
  1098. auto &front_nodes = front_nodes_iter->second;
  1099. auto idx_iter = front_nodes.find(output_idx);
  1100. if (idx_iter == front_nodes.end()) {
  1101. return false;
  1102. }
  1103. return idx_iter->second.second;
  1104. }
// Recomputes the ordered list of child graphs by scanning call/switch kernels in the
// current execution order, and re-parents each child graph to this graph (unless the
// child is actually this graph's own parent, which would create a cycle).
void KernelGraph::UpdateChildGraphOrder() {
  MS_LOG(INFO) << "Update " << ToString() << " child graph order.";
  SetExecOrderByDefault();
  auto call_nodes = FindNodeByPrimitive(
    {std::make_shared<Primitive>(prim::kPrimCall->name()), std::make_shared<Primitive>(prim::kPrimSwitch->name())});
  std::vector<std::weak_ptr<KernelGraph>> child_graph_order;
  for (auto &call_node : call_nodes) {
    MS_EXCEPTION_IF_NULL(call_node);
    auto call_child_graphs = AnfAlgo::GetCallSwitchKernelGraph(call_node->cast<CNodePtr>());
    for (const auto &child_graph : call_child_graphs) {
      MS_EXCEPTION_IF_NULL(child_graph);
      // Don't re-parent our own parent graph back onto ourselves.
      if (child_graph != parent_graph_.lock()) {
        auto shared_this = std::dynamic_pointer_cast<KernelGraph>(shared_from_this());
        MS_EXCEPTION_IF_NULL(shared_this);
        child_graph->set_parent_graph(shared_this);
      }
      child_graph_order.push_back(child_graph);
    }
  }
  for (size_t i = 0; i < child_graph_order.size(); ++i) {
    std::shared_ptr<KernelGraph> child_graph = child_graph_order[i].lock();
    MS_EXCEPTION_IF_NULL(child_graph);
    MS_LOG(INFO) << "Child graph[" << i << "][id:" << child_graph->graph_id() << "]";
  }
  child_graph_order_ = child_graph_order;
}
  1131. void KernelGraph::RemoveNodeFromGraph(const AnfNodePtr &node) {
  1132. if (backend_front_anf_map_.find(node) != backend_front_anf_map_.end()) {
  1133. auto front_node = backend_front_anf_map_[node];
  1134. (void)backend_front_anf_map_.erase(node);
  1135. (void)front_backend_anf_map_.erase(front_node);
  1136. }
  1137. if (node->isa<ValueNode>()) {
  1138. if (graph_value_nodes_.find(node->cast<ValueNodePtr>()) != graph_value_nodes_.end()) {
  1139. (void)graph_value_nodes_.erase(node->cast<ValueNodePtr>());
  1140. }
  1141. }
  1142. }
  1143. void KernelGraph::UpdateGraphDynamicAttr() {
  1144. for (const auto &cnode : execution_order_) {
  1145. if (AnfAlgo::IsDynamicShape(cnode)) {
  1146. MS_LOG(INFO) << "Update Graph Dynamic Attr";
  1147. is_dynamic_shape_ = true;
  1148. return;
  1149. }
  1150. }
  1151. is_dynamic_shape_ = false;
  1152. }
  1153. void KernelGraph::SetInputNodes() {
  1154. input_nodes_.clear();
  1155. for (const auto &input_node : inputs()) {
  1156. auto params = AnfAlgo::GetAllOutput(input_node);
  1157. std::copy(params.begin(), params.end(), std::back_inserter(input_nodes_));
  1158. }
  1159. }
  1160. void KernelGraph::SetOptimizerFlag() {
  1161. has_optimizer_ = false;
  1162. for (const auto &cnode : execution_order_) {
  1163. MS_EXCEPTION_IF_NULL(cnode);
  1164. auto node_name = AnfAlgo::GetCNodeName(cnode);
  1165. if (kOptOperatorSet.find(node_name) != kOptOperatorSet.end()) {
  1166. has_optimizer_ = true;
  1167. return;
  1168. }
  1169. if (node_name.find("Assign") != string::npos) {
  1170. for (auto &input : cnode->inputs()) {
  1171. MS_EXCEPTION_IF_NULL(input);
  1172. if (input->isa<Parameter>() && AnfAlgo::IsParameterWeight(input->cast<ParameterPtr>())) {
  1173. has_optimizer_ = true;
  1174. return;
  1175. }
  1176. }
  1177. }
  1178. }
  1179. }
  1180. std::string KernelGraph::ToString() const { return std::string("kernel_graph_").append(std::to_string(graph_id_)); }
// Releases runtime-side resources (inputs, value nodes, kernels) registered for
// this graph id before the graph object itself goes away.
KernelGraph::~KernelGraph() {
  device::KernelRuntimeManager::Instance().ClearGraphResource(graph_id_, *inputs_, graph_value_nodes_,
                                                              execution_order_);
}
  1185. } // namespace session
  1186. } // namespace mindspore