You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

anf_ir_dump.cc 15 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500
  1. /**
  2. * Copyright 2019 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "debug/anf_ir_dump.h"
  17. #if defined(_WIN32) || defined(_WIN64)
  18. #include <stdlib.h>
  19. #endif
  20. #include <fstream>
  21. #include <map>
  22. #include <memory>
  23. #include <unordered_map>
  24. #include "ir/primitive.h"
  25. #include "ir/func_graph.h"
  26. #include "device/kernel_info.h"
  27. #include "utils/graph_utils.h"
  28. #include "session/anf_runtime_algorithm.h"
  29. namespace mindspore {
  30. const std::string ToShortString(const TypeId &typeId) {
  31. std::string label = TypeIdLabel(typeId);
  32. std::string prefix = "kNumberType";
  33. if (prefix.length() > label.length()) {
  34. return label;
  35. }
  36. auto position = label.find(prefix);
  37. // position is 0 when label begins with prefix
  38. if (position != 0) {
  39. return label;
  40. }
  41. auto sub_position = position + prefix.length();
  42. if (sub_position >= label.length()) {
  43. return label;
  44. }
  45. return label.substr(sub_position);
  46. }
  47. void PrintKernelFormatAndType(std::ostringstream &buffer, const std::string &fmt, const TypeId &type,
  48. const std::vector<size_t> &shape) {
  49. buffer << "<" << ToShortString(type);
  50. if (!fmt.empty()) {
  51. buffer << "x" << fmt << shape;
  52. }
  53. buffer << ">";
  54. }
  55. void PrintNodeOutputType(std::ostringstream &buffer, const AnfNodePtr &nd) {
  56. if (nd == nullptr) {
  57. return;
  58. }
  59. abstract::ShapePtr shape = dyn_cast<abstract::Shape>(nd->Shape());
  60. TypePtr type = dyn_cast<Type>(nd->Type());
  61. if ((nullptr != shape) && (nullptr != type)) {
  62. buffer << "<" << type << "x" << shape->shape() << ">";
  63. } else if (nullptr != type) {
  64. buffer << "<" << type << ">";
  65. } else {
  66. buffer << "<null>";
  67. }
  68. }
  69. void PrintNodeInputType(std::ostringstream &buffer, const AnfNodePtr &nd) {
  70. if (nd == nullptr) {
  71. return;
  72. }
  73. std::vector<AnfNodePtr> inputs = SuccIncoming(nd);
  74. size_t len = inputs.size();
  75. if (len > 1) {
  76. // skip inputs[0] which is Primitive value node
  77. for (size_t i = 1; i < len; ++i) {
  78. AnfNodePtr in = inputs[i];
  79. if (i != 1) {
  80. buffer << ", ";
  81. }
  82. PrintNodeOutputType(buffer, in);
  83. }
  84. }
  85. }
// Append " : (<input types>) -> (<output type>)" for node nd, using the
// inferred types/shapes (not device kernel formats).
void PrintInputAndOutputInferType(std::ostringstream &buffer, const AnfNodePtr &nd) {
  buffer << " : (";
  PrintNodeInputType(buffer, nd);
  buffer << ") -> (";
  PrintNodeOutputType(buffer, nd);
  buffer << ")";
}
// Per-subgraph dump state: accumulates the IR text of one FuncGraph and the
// local-variable numbering assigned to its CNodes while dumping.
struct SubGraphIRInfo {
  int32_t local_var;                              // next free "%N" local variable index
  std::ostringstream buffer;                      // IR text accumulated for this subgraph
  OrderedMap<AnfNodePtr, int32_t> local_var_map;  // CNode -> assigned "%N" index
};
  98. void DumpGlobalInfoEntry(const FuncGraphPtr &graph, std::ostringstream &buffer) {
  99. if (graph == nullptr) {
  100. return;
  101. }
  102. buffer << "#IR entry : @" << graph->ToString() << "." << graph->debug_info()->get_id() << std::endl;
  103. buffer << "#flags :" << std::endl;
  104. for (const auto &flag : graph->flags()) {
  105. buffer << flag.first << " : " << flag.second << std::endl;
  106. }
  107. }
  108. void DumpKernelInfo(const CNodePtr &node, const std::shared_ptr<SubGraphIRInfo> &gsub) {
  109. if (node == nullptr || gsub == nullptr) {
  110. return;
  111. }
  112. auto kernel_info = node->kernel_info();
  113. if (kernel_info == nullptr || kernel_info->select_kernel_build_info() == nullptr) {
  114. return;
  115. }
  116. gsub->buffer << " : (";
  117. for (size_t i = 0; i < AnfAlgo::GetInputTensorNum(node); ++i) {
  118. if (i != 0) {
  119. gsub->buffer << ", ";
  120. }
  121. auto format = AnfAlgo::GetInputFormat(node, i);
  122. auto type = AnfAlgo::GetInputDeviceDataType(node, i);
  123. auto shape = AnfAlgo::GetInputDeviceShape(node, i);
  124. PrintKernelFormatAndType(gsub->buffer, format, type, shape);
  125. }
  126. gsub->buffer << ") -> (";
  127. for (size_t i = 0; i < AnfAlgo::GetOutputTensorNum(node); ++i) {
  128. if (i != 0) {
  129. gsub->buffer << ", ";
  130. }
  131. auto format = AnfAlgo::GetOutputFormat(node, i);
  132. auto type = AnfAlgo::GetOutputDeviceDataType(node, i);
  133. auto shape = AnfAlgo::GetOutputDeviceShape(node, i);
  134. PrintKernelFormatAndType(gsub->buffer, format, type, shape);
  135. }
  136. gsub->buffer << ")";
  137. gsub->buffer << std::endl;
  138. }
  139. void DumpParams(const FuncGraphPtr &graph, std::ostringstream &buffer, OrderedMap<AnfNodePtr, int32_t> *para_map) {
  140. if (graph == nullptr) {
  141. MS_LOG(INFO) << "Param graph is nullptr.";
  142. return;
  143. }
  144. std::vector<AnfNodePtr> parameters = graph->parameters();
  145. buffer << "#Total params : " << parameters.size() << std::endl;
  146. buffer << std::endl;
  147. // dump parameters
  148. int32_t para = 1;
  149. for (const auto &p : parameters) {
  150. if (p == nullptr) {
  151. continue;
  152. }
  153. auto parameter_ptr = p->cast<ParameterPtr>();
  154. if (parameter_ptr == nullptr) {
  155. MS_LOG(EXCEPTION) << "p cannot cast to ParameterPtr";
  156. }
  157. buffer << "%para" << para << " = " << parameter_ptr->name() << " : ";
  158. // print parameters' type and shape
  159. PrintNodeOutputType(buffer, p);
  160. auto kernel_info = p->kernel_info();
  161. if (kernel_info != nullptr && kernel_info->select_kernel_build_info() != nullptr) {
  162. buffer << " : ";
  163. auto type = AnfAlgo::GetOutputDeviceDataType(p, 0);
  164. auto format = AnfAlgo::GetOutputFormat(p, 0);
  165. auto shape = AnfAlgo::GetOutputDeviceShape(p, 0);
  166. PrintKernelFormatAndType(buffer, format, type, shape);
  167. buffer << " : IsWeight:" << std::boolalpha << AnfAlgo::IsParameterWeight(parameter_ptr);
  168. }
  169. buffer << std::endl;
  170. if (para_map != nullptr) {
  171. (*para_map)[p] = para++;
  172. }
  173. MS_LOG(DEBUG) << "Record param: " << p->ToString() << " graph belong : " << p->func_graph()->ToString();
  174. }
  175. }
  176. void DumpOperator(const AnfNodePtr &op, const std::shared_ptr<SubGraphIRInfo> &gsub) {
  177. if (op == nullptr) {
  178. MS_LOG(INFO) << "Param op is nullptr";
  179. return;
  180. }
  181. if (gsub == nullptr) {
  182. MS_LOG(INFO) << "Param gsub is nullptr";
  183. return;
  184. }
  185. if (IsValueNode<FuncGraph>(op)) {
  186. FuncGraphPtr fg = GetValueNode<FuncGraphPtr>(op);
  187. if (fg != nullptr) {
  188. gsub->buffer << "call @" << fg->ToString() << "." << fg->debug_info()->get_id();
  189. }
  190. } else if (op->isa<CNode>()) {
  191. if (gsub->local_var_map.find(op) != gsub->local_var_map.end()) {
  192. gsub->buffer << "%" << gsub->local_var_map[op];
  193. }
  194. } else if (op->isa<ValueNode>()) {
  195. gsub->buffer << GetValueNode(op)->ToString();
  196. } else {
  197. gsub->buffer << op->ToString();
  198. }
  199. }
  200. void DumpOperands(const AnfNodePtr &nd, OrderedMap<AnfNodePtr, int32_t> *para_map,
  201. const std::shared_ptr<SubGraphIRInfo> &gsub) {
  202. if (nd == nullptr || para_map == nullptr || gsub == nullptr) {
  203. return;
  204. }
  205. gsub->buffer << "(";
  206. std::vector<AnfNodePtr> inputs = SuccIncoming(nd);
  207. size_t len = inputs.size();
  208. if (len > 1) {
  209. // skip inputs[0] which is Primitive valuenode
  210. for (size_t i = 1; i < len; ++i) {
  211. AnfNodePtr in = inputs[i];
  212. MS_EXCEPTION_IF_NULL(in);
  213. if (i != 1) {
  214. gsub->buffer << ", ";
  215. }
  216. if (in->isa<Parameter>()) {
  217. if (!(*para_map)[in]) {
  218. gsub->buffer << "%arg";
  219. } else {
  220. gsub->buffer << "%para" << (*para_map)[in];
  221. }
  222. } else if (in->isa<CNode>()) {
  223. gsub->buffer << "%" << gsub->local_var_map[in];
  224. } else if (in->isa<ValueNode>() && !IsValueNode<FuncGraph>(in)) {
  225. // non Primitive valuenode
  226. gsub->buffer << GetValueNode(in)->ToString();
  227. } else if (IsValueNode<FuncGraph>(in)) {
  228. FuncGraphPtr fg = GetValueNode<FuncGraphPtr>(in);
  229. gsub->buffer << "@" << fg->ToString() << "." << fg->debug_info()->get_id();
  230. } else {
  231. gsub->buffer << in->ToString();
  232. }
  233. }
  234. }
  235. gsub->buffer << ")";
  236. }
  237. void DumpParallelInfo(const CNodePtr &node, const std::shared_ptr<SubGraphIRInfo> &gsub) {
  238. if ((node == nullptr) || (gsub == nullptr)) {
  239. return;
  240. }
  241. auto operator_info = node->operator_info();
  242. if (operator_info == nullptr) {
  243. return;
  244. }
  245. auto strategy = operator_info->strategy();
  246. if (strategy == nullptr) {
  247. return;
  248. }
  249. ValuePtr temp = MakeValue(strategy->GetInputDim());
  250. gsub->buffer << " { strategy: ";
  251. gsub->buffer << temp->ToString();
  252. gsub->buffer << " }";
  253. }
  254. void DumpOperateAttrs(const AnfNodePtr &op, const std::shared_ptr<SubGraphIRInfo> &gsub) {
  255. if (op == nullptr || gsub == nullptr) {
  256. return;
  257. }
  258. if (IsValueNode<Primitive>(op)) {
  259. PrimitivePtr primitive = GetValueNode<PrimitivePtr>(op);
  260. if (!primitive->instance_name().empty()) {
  261. gsub->buffer << " {";
  262. gsub->buffer << "instance name"
  263. << ": ";
  264. gsub->buffer << primitive->instance_name();
  265. gsub->buffer << "}";
  266. }
  267. auto attrs = primitive->attrs();
  268. if (!attrs.empty()) {
  269. gsub->buffer << " {";
  270. int i = 0;
  271. for (const auto &attr : attrs) {
  272. if (attr.first == PARALLEL_STRATEGY) {
  273. continue; // skip the strategy
  274. }
  275. if (i++ != 0) {
  276. gsub->buffer << ", ";
  277. }
  278. gsub->buffer << attr.first << ": ";
  279. if (attr.second == nullptr) {
  280. gsub->buffer << "null";
  281. } else {
  282. gsub->buffer << attr.second->ToString();
  283. }
  284. }
  285. gsub->buffer << "}";
  286. }
  287. }
  288. gsub->buffer << std::endl;
  289. }
  290. void DumpShape(const AnfNodePtr &nd, const FuncGraphPtr &sub_graph, const std::shared_ptr<SubGraphIRInfo> &gsub) {
  291. if (nd == nullptr || sub_graph == nullptr || gsub == nullptr) {
  292. return;
  293. }
  294. if (nd != sub_graph->get_return()) {
  295. gsub->buffer << " : (";
  296. PrintNodeInputType(gsub->buffer, nd);
  297. gsub->buffer << ") -> (";
  298. PrintNodeOutputType(gsub->buffer, nd);
  299. gsub->buffer << ")";
  300. } else {
  301. gsub->buffer << " : (";
  302. PrintNodeInputType(gsub->buffer, nd);
  303. gsub->buffer << ")";
  304. }
  305. gsub->buffer << std::endl;
  306. }
  307. void DumpCNode(const CNodePtr &nd, const FuncGraphPtr &sub_graph, OrderedMap<AnfNodePtr, int32_t> *const para_map,
  308. const std::shared_ptr<SubGraphIRInfo> &gsub, bool dump_full_name = false) {
  309. if (nd == nullptr || sub_graph == nullptr || para_map == nullptr || gsub == nullptr) {
  310. return;
  311. }
  312. if (nd != sub_graph->get_return()) {
  313. gsub->buffer << " %" << gsub->local_var << "(" << nd->ToString() << ")"
  314. << " = ";
  315. gsub->local_var_map[nd] = gsub->local_var++;
  316. } else {
  317. gsub->buffer << " ";
  318. }
  319. if (nd->inputs().empty()) {
  320. MS_LOG(EXCEPTION) << "Input of apply node is empty";
  321. }
  322. // print operator
  323. AnfNodePtr op = nd->input(0);
  324. DumpOperator(op, gsub);
  325. // print operands
  326. DumpOperands(nd, para_map, gsub);
  327. // print operator attrs
  328. DumpOperateAttrs(op, gsub);
  329. // print parallel info
  330. DumpParallelInfo(nd, gsub);
  331. // print shape info
  332. DumpShape(nd, sub_graph, gsub);
  333. // print kernel info
  334. DumpKernelInfo(nd, gsub);
  335. if (dump_full_name) {
  336. gsub->buffer << " : (" << nd->fullname_with_scope() << ")" << std::endl;
  337. }
  338. }
  339. void DumpIRInSubgraph(const std::vector<AnfNodePtr> &nodes, OrderedMap<AnfNodePtr, int32_t> *para_map,
  340. OrderedMap<FuncGraphPtr, std::shared_ptr<SubGraphIRInfo>> *const sub_graphs,
  341. bool dump_full_name = false) {
  342. if (para_map == nullptr || sub_graphs == nullptr) {
  343. return;
  344. }
  345. for (const auto &nd : nodes) {
  346. MS_EXCEPTION_IF_NULL(nd);
  347. FuncGraphPtr sub_graph = nd->func_graph();
  348. if (sub_graph == nullptr) {
  349. MS_LOG(DEBUG) << "Node[" << nd->ToString() << "] belongs to no graph!";
  350. continue;
  351. }
  352. std::shared_ptr<SubGraphIRInfo> gsub = (*sub_graphs)[sub_graph];
  353. if (gsub == nullptr) {
  354. gsub = std::make_shared<SubGraphIRInfo>();
  355. gsub->local_var = 0;
  356. (*sub_graphs)[sub_graph] = gsub;
  357. }
  358. if (!nd->isa<Parameter>()) {
  359. if (nd->isa<CNode>()) {
  360. // print and record output of operator if it is not 'Return'
  361. DumpCNode(nd->cast<CNodePtr>(), sub_graph, para_map, gsub, dump_full_name);
  362. } else {
  363. gsub->buffer << " " << nd->ToString() << std::endl;
  364. }
  365. }
  366. }
  367. }
  368. void DumpSubgraph(const OrderedMap<FuncGraphPtr, std::shared_ptr<SubGraphIRInfo>> *sub_graphs,
  369. const FuncGraphPtr &graph, std::ofstream &fout) {
  370. if (sub_graphs == nullptr || graph == nullptr) {
  371. return;
  372. }
  373. fout << "#Total subgraph : " << sub_graphs->size() << std::endl;
  374. fout << std::endl;
  375. for (const auto &sg : *sub_graphs) {
  376. fout << "subgraph flag:" << std::endl;
  377. MS_EXCEPTION_IF_NULL(sg.first);
  378. for (const auto &flag : sg.first->flags()) {
  379. fout << flag.first << " : " << flag.second << std::endl;
  380. }
  381. fout << "subgraph @" << sg.first->ToString() << ".";
  382. fout << sg.first->debug_info()->get_id() << "(";
  383. if (sg.first != graph) {
  384. fout << "%arg";
  385. }
  386. fout << ") {" << std::endl;
  387. MS_EXCEPTION_IF_NULL(sg.second);
  388. fout << sg.second->buffer.str();
  389. fout << "}" << std::endl;
  390. fout << std::endl;
  391. }
  392. }
  393. #ifdef ENABLE_DUMP_IR
  394. void DumpIR(const std::string &filename, const FuncGraphPtr &graph, bool dump_full_name) {
  395. if (graph == nullptr) {
  396. return;
  397. }
  398. if (filename.size() > PATH_MAX) {
  399. MS_LOG(ERROR) << "File path " << filename << " is too long.";
  400. return;
  401. }
  402. char real_path[PATH_MAX] = {0};
  403. #if defined(_WIN32) || defined(_WIN64)
  404. if (_fullpath(real_path, filename.c_str(), PATH_MAX) == nullptr) {
  405. MS_LOG(DEBUG) << "dir " << filename << " does not exit.";
  406. }
  407. #else
  408. if (nullptr == realpath(filename.c_str(), real_path)) {
  409. MS_LOG(DEBUG) << "Dir " << filename << " does not exit.";
  410. }
  411. #endif
  412. OrderedMap<AnfNodePtr, int32_t> para_map;
  413. std::string path_string = real_path;
  414. ChangeFileMode(path_string, S_IRWXU);
  415. std::ofstream fout(real_path);
  416. std::ostringstream buffer;
  417. if (!fout.is_open()) {
  418. MS_LOG(ERROR) << "Open dump file '" << real_path << "' failed!";
  419. return;
  420. }
  421. auto nodes = TopoSort(graph->get_return(), SuccDeeperSimple, AlwaysInclude);
  422. // dump global info
  423. DumpGlobalInfoEntry(graph, buffer);
  424. DumpParams(graph, buffer, &para_map);
  425. OrderedMap<FuncGraphPtr, std::shared_ptr<SubGraphIRInfo>> sub_graphs;
  426. // dump ir in each sub graph
  427. DumpIRInSubgraph(nodes, &para_map, &sub_graphs, dump_full_name);
  428. // output global info
  429. fout << buffer.str() << std::endl;
  430. // output each sub graph
  431. DumpSubgraph(&sub_graphs, graph, fout);
  432. fout.close();
  433. // set file mode to read only by user
  434. ChangeFileMode(path_string, S_IRUSR);
  435. }
  436. #else
  437. void DumpIR(const std::string &, const FuncGraphPtr &, bool) {
  438. static bool already_printed = false;
  439. if (already_printed) {
  440. return;
  441. }
  442. already_printed = true;
  443. MS_LOG(WARNING) << "The functionality of dumping function graph IR is disabled, "
  444. << "please recompile source to enable it. See help of building script.";
  445. }
  446. #endif
  447. } // namespace mindspore