You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

e2e_dump.cc 17 kB

4 years ago
5 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421
  1. /**
  2. * Copyright 2020-2021 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "debug/data_dump/e2e_dump.h"
  17. #include <unistd.h>
  18. #include <algorithm>
  19. #include <map>
  20. #include <vector>
  21. #include "debug/data_dump/dump_json_parser.h"
  22. #include "common/trans.h"
  23. #include "debug/anf_ir_utils.h"
  24. #include "debug/common.h"
  25. #include "backend/session/anf_runtime_algorithm.h"
  26. #include "utils/ms_context.h"
  27. #include "runtime/device/kernel_runtime_manager.h"
  28. #include "utils/config_manager.h"
  29. #include "utils/file_utils.h"
  30. #ifdef ENABLE_DEBUGGER
  31. #include "debug/debug_services.h"
  32. #include "debug/tensor_load.h"
  33. #include "debug/debugger/debugger.h"
  34. #endif
  35. namespace mindspore {
  36. bool E2eDump::IsDeviceTargetGPU() {
  37. auto context = MsContext::GetInstance();
  38. MS_EXCEPTION_IF_NULL(context);
  39. return context->get_param<std::string>(MS_CTX_DEVICE_TARGET) == kGPUDevice;
  40. }
  41. void E2eDump::DumpGPUMemToFile(const std::string &file_path, const std::string &original_kernel_name,
  42. const device::DeviceAddress &addr, const ShapeVector &int_shapes,
  43. const TypeId &host_type, const TypeId &device_type, bool trans_flag, size_t slot,
  44. const Debugger *debugger) {
  45. #ifdef ENABLE_DEBUGGER
  46. auto format = kOpFormat_DEFAULT;
  47. MS_EXCEPTION_IF_NULL(debugger);
  48. auto ret = debugger->DumpTensorToFile(original_kernel_name, trans_flag, file_path, format, int_shapes, host_type,
  49. device_type, addr.format(), slot);
  50. if (!ret) {
  51. MS_LOG(INFO) << "DumpTensorToFile Failed: flag:" << trans_flag << ", path:" << file_path
  52. << ", host_format:" << format;
  53. }
  54. #endif
  55. }
  56. void E2eDump::DumpOutput(const session::KernelGraph *graph, const std::string &dump_path, const Debugger *debugger) {
  57. MS_EXCEPTION_IF_NULL(graph);
  58. auto &dump_json_parser = DumpJsonParser::GetInstance();
  59. if (!dump_json_parser.OutputNeedDump()) {
  60. return;
  61. }
  62. MS_LOG(INFO) << "Start e2e dump output";
  63. bool trans_flag = dump_json_parser.trans_flag();
  64. const auto &apply_kernels = graph->execution_order();
  65. for (const auto &node : apply_kernels) {
  66. MS_EXCEPTION_IF_NULL(node);
  67. std::string kernel_name = GetKernelNodeName(node);
  68. if (!dump_json_parser.NeedDump(kernel_name)) {
  69. continue;
  70. }
  71. DumpJsonParser::GetInstance().MatchKernel(kernel_name);
  72. DumpOutputImpl(node, trans_flag, dump_path, &kernel_name, debugger);
  73. }
  74. }
  75. void E2eDump::DumpOutputSingleNode(const CNodePtr &node, const std::string &dump_path, const Debugger *debugger) {
  76. auto &dump_json_parser = DumpJsonParser::GetInstance();
  77. if (!dump_json_parser.OutputNeedDump()) {
  78. return;
  79. }
  80. bool trans_flag = dump_json_parser.trans_flag();
  81. MS_EXCEPTION_IF_NULL(node);
  82. std::string kernel_name = GetKernelNodeName(node);
  83. if (!dump_json_parser.NeedDump(kernel_name)) {
  84. return;
  85. }
  86. DumpJsonParser::GetInstance().MatchKernel(kernel_name);
  87. DumpOutputImpl(node, trans_flag, dump_path, &kernel_name, debugger);
  88. }
  89. void E2eDump::DumpOutputImpl(const CNodePtr &node, bool trans_flag, const std::string &dump_path,
  90. std::string *kernel_name, const Debugger *debugger) {
  91. MS_EXCEPTION_IF_NULL(node);
  92. GetFileKernelName(NOT_NULL(kernel_name));
  93. auto output_size = AnfAlgo::GetOutputTensorNum(node);
  94. for (size_t j = 0; j < output_size; ++j) {
  95. if (!AnfAlgo::OutputAddrExist(node, j)) {
  96. continue;
  97. }
  98. auto addr = AnfAlgo::GetOutputAddr(node, j);
  99. MS_EXCEPTION_IF_NULL(addr);
  100. ShapeVector int_shapes;
  101. GetDumpIntShape(node, j, NOT_NULL(&int_shapes), trans_flag);
  102. auto type = AnfAlgo::GetOutputInferDataType(node, j);
  103. auto device_type = AnfAlgo::GetOutputDeviceDataType(node, j);
  104. std::string op_type = AnfAlgo::GetCNodeName(node);
  105. std::string op_name = GetOpNameWithoutScope(*kernel_name);
  106. uint32_t task_id = 0;
  107. uint32_t stream_id = 0;
  108. uint64_t timestamp = GetTimeStamp();
  109. std::string file_path = dump_path + '/' + op_type + '.' + op_name + '.' + std::to_string(task_id) + '.' +
  110. std::to_string(stream_id) + '.' + std::to_string(timestamp) + ".output." +
  111. std::to_string(j);
  112. if (IsDeviceTargetGPU()) {
  113. DumpGPUMemToFile(file_path, GetKernelNodeName(node), *addr, int_shapes, type, device_type, trans_flag, j,
  114. debugger);
  115. } else {
  116. DumpMemToFile(file_path, *addr, int_shapes, type, trans_flag);
  117. }
  118. }
  119. }
  120. void E2eDump::DumpInput(const session::KernelGraph *graph, const std::string &dump_path, const Debugger *debugger) {
  121. MS_EXCEPTION_IF_NULL(graph);
  122. auto &dump_json_parser = DumpJsonParser::GetInstance();
  123. if (!dump_json_parser.InputNeedDump()) {
  124. return;
  125. }
  126. MS_LOG(INFO) << "Start e2e dump input";
  127. bool trans_flag = dump_json_parser.trans_flag();
  128. const auto &apply_kernels = graph->execution_order();
  129. for (const auto &node : apply_kernels) {
  130. MS_EXCEPTION_IF_NULL(node);
  131. std::string kernel_name = GetKernelNodeName(node);
  132. if (!dump_json_parser.NeedDump(kernel_name)) {
  133. continue;
  134. }
  135. DumpJsonParser::GetInstance().MatchKernel(kernel_name);
  136. DumpInputImpl(node, trans_flag, dump_path, &kernel_name, debugger);
  137. }
  138. }
  139. void E2eDump::DumpInputSingleNode(const CNodePtr &node, const std::string &dump_path, const Debugger *debugger) {
  140. auto &dump_json_parser = DumpJsonParser::GetInstance();
  141. if (!dump_json_parser.InputNeedDump()) {
  142. return;
  143. }
  144. bool trans_flag = dump_json_parser.trans_flag();
  145. MS_EXCEPTION_IF_NULL(node);
  146. std::string kernel_name = GetKernelNodeName(node);
  147. if (!dump_json_parser.NeedDump(kernel_name)) {
  148. return;
  149. }
  150. DumpJsonParser::GetInstance().MatchKernel(kernel_name);
  151. DumpInputImpl(node, trans_flag, dump_path, &kernel_name, debugger);
  152. }
  153. void E2eDump::DumpInputImpl(const CNodePtr &node, bool trans_flag, const std::string &dump_path,
  154. std::string *kernel_name, const Debugger *debugger) {
  155. MS_EXCEPTION_IF_NULL(node);
  156. GetFileKernelName(NOT_NULL(kernel_name));
  157. auto input_size = AnfAlgo::GetInputTensorNum(node);
  158. for (size_t j = 0; j < input_size; ++j) {
  159. auto kernel_with_index = AnfAlgo::GetPrevNodeOutput(node, j);
  160. auto input = kernel_with_index.first;
  161. auto index = kernel_with_index.second;
  162. if (!AnfAlgo::OutputAddrExist(input, index)) {
  163. continue;
  164. }
  165. auto addr = AnfAlgo::GetOutputAddr(input, index);
  166. MS_EXCEPTION_IF_NULL(addr);
  167. std::string tensor_name = GetKernelNodeName(node);
  168. size_t slot = j;
  169. if (IsDeviceTargetGPU()) {
  170. auto input_kernel = node->input(j + 1);
  171. std::string input_kernel_name = GetKernelNodeName(input_kernel);
  172. tensor_name = input_kernel_name;
  173. slot = 0;
  174. }
  175. ShapeVector int_shapes;
  176. GetDumpIntShape(input, index, NOT_NULL(&int_shapes), trans_flag);
  177. auto type = AnfAlgo::GetOutputInferDataType(input, index);
  178. auto device_type = AnfAlgo::GetOutputDeviceDataType(input, index);
  179. std::string op_type = AnfAlgo::GetCNodeName(node);
  180. std::string op_name = GetOpNameWithoutScope(*kernel_name);
  181. uint64_t timestamp = GetTimeStamp();
  182. uint32_t task_id = 0;
  183. uint32_t stream_id = 0;
  184. std::string file_path = dump_path + '/' + op_type + '.' + op_name + '.' + std::to_string(task_id) + '.' +
  185. std::to_string(stream_id) + '.' + std::to_string(timestamp) + ".input." + std::to_string(j);
  186. MS_EXCEPTION_IF_NULL(addr);
  187. if (IsDeviceTargetGPU()) {
  188. DumpGPUMemToFile(file_path, tensor_name, *addr, int_shapes, type, device_type, trans_flag, slot, debugger);
  189. } else {
  190. DumpMemToFile(file_path, *addr, int_shapes, type, trans_flag);
  191. }
  192. }
  193. }
  194. void E2eDump::DumpSingleAnfNode(const AnfNodePtr &anf_node, const size_t output_index, const std::string &dump_path,
  195. bool trans_flag, std::map<std::string, size_t> *const_map, const Debugger *debugger) {
  196. MS_EXCEPTION_IF_NULL(anf_node);
  197. auto &dump_json_parser = DumpJsonParser::GetInstance();
  198. if ((!anf_node->isa<Parameter>() && !anf_node->isa<ValueNode>()) || IsValueNode<StringImm>(anf_node)) {
  199. return;
  200. }
  201. std::string node_name = GetKernelNodeName(anf_node);
  202. std::string dump_name = node_name;
  203. if (anf_node->isa<ValueNode>()) {
  204. MS_EXCEPTION_IF_NULL(const_map);
  205. auto iter = const_map->find(node_name);
  206. if (iter == const_map->end()) {
  207. return;
  208. }
  209. dump_name = std::string("cst") + std::to_string(iter->second);
  210. }
  211. if (!dump_json_parser.NeedDump(node_name)) {
  212. return;
  213. }
  214. DumpJsonParser::GetInstance().MatchKernel(node_name);
  215. GetFileKernelName(NOT_NULL(&node_name));
  216. // check if output address exists, if not, return;
  217. if (!AnfAlgo::OutputAddrExist(anf_node, output_index)) {
  218. return;
  219. }
  220. auto addr = AnfAlgo::GetOutputAddr(anf_node, output_index);
  221. MS_EXCEPTION_IF_NULL(addr);
  222. ShapeVector int_shapes;
  223. GetDumpIntShape(anf_node, output_index, NOT_NULL(&int_shapes), trans_flag);
  224. auto type = AnfAlgo::GetOutputInferDataType(anf_node, output_index);
  225. auto device_type = AnfAlgo::GetOutputDeviceDataType(anf_node, output_index);
  226. uint64_t timestamp = GetTimeStamp();
  227. uint32_t task_id = 0;
  228. uint32_t stream_id = 0;
  229. std::string file_path = dump_path + "/Parameter." + dump_name + '.' + std::to_string(task_id) + '.' +
  230. std::to_string(stream_id) + '.' + std::to_string(timestamp) + ".output.0";
  231. if (IsDeviceTargetGPU()) {
  232. DumpGPUMemToFile(file_path, node_name, *addr, int_shapes, type, device_type, trans_flag, 0, debugger);
  233. } else {
  234. DumpMemToFile(file_path, *addr, int_shapes, type, trans_flag);
  235. }
  236. }
  237. void E2eDump::DumpParametersAndConst(const session::KernelGraph *graph, const std::string &dump_path,
  238. const Debugger *debugger) {
  239. MS_EXCEPTION_IF_NULL(graph);
  240. auto &dump_json_parser = DumpJsonParser::GetInstance();
  241. if (!dump_json_parser.OutputNeedDump()) {
  242. return;
  243. }
  244. MS_LOG(INFO) << "Start e2e dump parameters and Const values";
  245. bool trans_flag = dump_json_parser.trans_flag();
  246. std::map<std::string, size_t> const_map;
  247. GetConstantId(graph, &const_map);
  248. // dump parameters
  249. const auto &parameters = graph->inputs();
  250. for (auto &item : parameters) {
  251. DumpSingleAnfNode(item, PARAMETER_OUTPUT_INDEX, dump_path, trans_flag, &const_map, debugger);
  252. }
  253. // dump const values
  254. auto value_nodes = graph->graph_value_nodes();
  255. for (const auto &value_node : value_nodes) {
  256. DumpSingleAnfNode(value_node, VALUE_NODE_OUTPUT_INDEX, dump_path, trans_flag, &const_map, debugger);
  257. }
  258. }
  259. void E2eDump::UpdateIterDumpSetup(const session::KernelGraph *graph, bool sink_mode) {
  260. uint32_t graph_id = graph->graph_id();
  261. auto &dump_json_parser = DumpJsonParser::GetInstance();
  262. if (IsDeviceTargetGPU()) {
  263. if (starting_graph_id == INT32_MAX) {
  264. starting_graph_id = graph_id;
  265. } else if (starting_graph_id == graph_id && !MsContext::GetInstance()->get_param<bool>(MS_CTX_ENABLE_MINDRT)) {
  266. // Update dump iter for mindrt runtime is done using UpdateIterGPUDump().
  267. // Update dump iter for GPU old runtime.
  268. dump_json_parser.UpdateDumpIter();
  269. }
  270. return;
  271. }
  272. // If device target is Ascend
  273. if (sink_mode && graph->IsDatasetGraph()) {
  274. MS_LOG(INFO) << "No need to update iteration for dataset graph.";
  275. return;
  276. }
  277. // In multi network scripts, dump iter is equal to the number of networks that have been executed so far.
  278. dump_json_parser.UpdateDumpIter();
  279. }
  280. void E2eDump::DumpSetup(const session::KernelGraph *graph) {
  281. auto &dump_json_parser = DumpJsonParser::GetInstance();
  282. bool sink_mode = (ConfigManager::GetInstance().dataset_mode() || E2eDump::isDatasetGraph(graph));
  283. if (dump_json_parser.async_dump_enabled() || dump_json_parser.e2e_dump_enabled()) {
  284. UpdateIterDumpSetup(graph, sink_mode);
  285. }
  286. }
  287. void E2eDump::UpdateIterGPUDump() { DumpJsonParser::GetInstance().UpdateDumpIter(); }
  288. void E2eDump::DumpRunIter(const KernelGraphPtr &graph, uint32_t rank_id) {
  289. auto &json_parser = DumpJsonParser::GetInstance();
  290. if (!(json_parser.async_dump_enabled() || json_parser.e2e_dump_enabled())) {
  291. return;
  292. }
  293. bool sink_mode = (ConfigManager::GetInstance().dataset_mode() || graph->IsDatasetGraph());
  294. auto iter_num = SizeToInt(LongToSize(ConfigManager::GetInstance().iter_num()));
  295. if (graph->IsDatasetGraph()) {
  296. MS_LOG(INFO) << "graph: " << graph->graph_id() << " is dataset graph, not creating graph history file.";
  297. return;
  298. }
  299. std::string execution_order_path = json_parser.path() + "/rank_" + std::to_string(rank_id) + "/execution_order/";
  300. std::string file_name_to_check =
  301. execution_order_path + "/ms_global_execution_order_graph_" + std::to_string(graph->graph_id()) + ".csv";
  302. auto real_path = Common::CreatePrefixPath(file_name_to_check);
  303. if (!real_path.has_value()) {
  304. MS_LOG(WARNING) << "Check file path: " << file_name_to_check << " failed.";
  305. return;
  306. }
  307. std::string file_name = real_path.value();
  308. ChangeFileMode(file_name, S_IWUSR);
  309. std::ofstream fout(file_name, std::ofstream::app);
  310. if (!fout.is_open()) {
  311. MS_LOG(WARNING) << "Open file for saving graph global execution order failed.";
  312. return;
  313. }
  314. if (sink_mode && json_parser.async_dump_enabled()) {
  315. // for async dump when sink_mode = true, cur_dump_iter() = current_epoch
  316. // dump history for all iterations in the epoch
  317. for (int i = 0; i < iter_num; i++) {
  318. fout << std::to_string(json_parser.cur_dump_iter() * iter_num + i) + "\n";
  319. }
  320. } else {
  321. fout << std::to_string(json_parser.cur_dump_iter()) + "\n";
  322. }
  323. fout.close();
  324. ChangeFileMode(file_name, S_IRUSR);
  325. }
  326. void E2eDump::DumpData(const session::KernelGraph *graph, uint32_t rank_id, const Debugger *debugger) {
  327. MS_EXCEPTION_IF_NULL(graph);
  328. bool success = false;
  329. auto &dump_json_parser = DumpJsonParser::GetInstance();
  330. uint32_t graph_id = graph->graph_id();
  331. if (dump_json_parser.GetIterDumpFlag()) {
  332. MS_LOG(INFO) << "Start e2e dump. Current iteration is " << dump_json_parser.cur_dump_iter();
  333. MS_LOG(INFO) << "Current graph id is " << graph_id;
  334. std::string dump_path = GenerateDumpPath(graph_id, rank_id);
  335. DumpInput(graph, dump_path, debugger);
  336. DumpOutput(graph, dump_path, debugger);
  337. DumpParametersAndConst(graph, dump_path, debugger);
  338. success = true;
  339. }
  340. if (success) {
  341. MS_LOG(DEBUG) << "E2eDump Dump Data completed!";
  342. } else {
  343. MS_LOG(DEBUG) << "E2eDump Dump has not occurred!";
  344. }
  345. }
  346. bool E2eDump::DumpSingleNodeData(const CNodePtr &node, uint32_t graph_id, uint32_t rank_id, const Debugger *debugger) {
  347. bool success = false;
  348. auto &dump_json_parser = DumpJsonParser::GetInstance();
  349. if (dump_json_parser.GetIterDumpFlag()) {
  350. std::string dump_path = GenerateDumpPath(graph_id, rank_id);
  351. DumpInputSingleNode(node, dump_path, debugger);
  352. DumpOutputSingleNode(node, dump_path, debugger);
  353. success = true;
  354. }
  355. return success;
  356. }
  357. bool E2eDump::DumpParametersAndConstData(const session::KernelGraph *graph, uint32_t rank_id,
  358. const Debugger *debugger) {
  359. bool success = false;
  360. uint32_t graph_id = graph->graph_id();
  361. auto &dump_json_parser = DumpJsonParser::GetInstance();
  362. if (dump_json_parser.GetIterDumpFlag()) {
  363. MS_LOG(INFO) << "DumpParametersAndConst. Current iteration is " << dump_json_parser.cur_dump_iter();
  364. MS_LOG(INFO) << "Current graph id is " << graph_id;
  365. std::string dump_path = GenerateDumpPath(graph_id, rank_id);
  366. DumpParametersAndConst(graph, dump_path, debugger);
  367. success = true;
  368. }
  369. return success;
  370. }
  371. bool E2eDump::isDatasetGraph(const session::KernelGraph *graph) {
  372. // check if there is GetNext or InitDataSetQueue node
  373. const auto &nodes = graph->execution_order();
  374. for (const auto &node : nodes) {
  375. auto node_name = AnfAlgo::GetCNodeName(node);
  376. if (node_name == prim::kPrimGetNext->name() || node_name == prim::kPrimInitDataSetQueue->name()) {
  377. return true;
  378. }
  379. }
  380. return false;
  381. }
  382. bool E2eDump::DumpDirExists(const std::string &dump_path) {
  383. DIR *dir = opendir(dump_path.c_str());
  384. if (dir != nullptr) {
  385. MS_LOG(INFO) << "Dump dir " << dump_path << " exists";
  386. if (closedir(dir) == -1) {
  387. MS_LOG(WARNING) << "Dump dir " << dump_path << " close failed!";
  388. }
  389. return true;
  390. }
  391. return false;
  392. }
  393. } // namespace mindspore