You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

dump_utils.cc 7.1 kB

4 years ago
4 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183
  1. /**
  2. * Copyright 2021-2022 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include "debug/data_dump/dump_utils.h"
  17. #include <map>
  18. #include <vector>
  19. #include <algorithm>
  20. #include "utils/ms_device_shape_transfer.h"
  21. #include "utils/ms_context.h"
  22. #include "debug/anf_ir_utils.h"
  23. #include "debug/data_dump/dump_json_parser.h"
  24. #include "backend/common/session/anf_runtime_algorithm.h"
  25. #include "runtime/device/kernel_runtime_manager.h"
  26. #include "utils/utils.h"
  27. #include "debug/common.h"
  28. #include "runtime/graph_scheduler/device_tensor_store.h"
  29. using mindspore::runtime::DeviceTensorStore;
  30. namespace mindspore {
  31. uint32_t ConvertPhysicalDeviceId(uint32_t device_id) {
  32. auto context = MsContext::GetInstance();
  33. MS_EXCEPTION_IF_NULL(context);
  34. auto device_target = context->get_param<std::string>(MS_CTX_DEVICE_TARGET);
  35. auto kernel_runtime = device::KernelRuntimeManager::Instance().GetSingleKernelRuntime(device_target, device_id);
  36. MS_EXCEPTION_IF_NULL(kernel_runtime);
  37. return kernel_runtime->device_id();
  38. }
  39. /*
  40. * Feature group: Dump.
  41. * Target device group: Ascend, GPU and CPU.
  42. * Runtime category: Old runtime, MindRT.
  43. * Description: Generate dir path to dump data. It will be in these formats:
  44. * 1) tensor/statistic: /dump_path/rank_{rank_id}/{net_name}/{graph_id}/{iter_num}.
  45. * 2) constant data: /dump_path/rank_{rank_id}/{net_name}/{graph_id}/constants/.
  46. */
  47. std::string GenerateDumpPath(uint32_t graph_id, uint32_t rank_id, bool is_cst) {
  48. auto &dump_json_parser = DumpJsonParser::GetInstance();
  49. std::string net_name = dump_json_parser.net_name();
  50. std::string iterator = std::to_string(dump_json_parser.cur_dump_iter());
  51. std::string dump_path = dump_json_parser.path();
  52. if (dump_path.back() != '/') {
  53. dump_path += "/";
  54. }
  55. if (is_cst) {
  56. dump_path += ("rank_" + std::to_string(rank_id) + "/" + net_name + "/" + std::to_string(graph_id) + "/constants/");
  57. } else {
  58. dump_path +=
  59. ("rank_" + std::to_string(rank_id) + "/" + net_name + "/" + std::to_string(graph_id) + "/" + iterator + "/");
  60. }
  61. return dump_path;
  62. }
  63. void GetFileKernelName(NotNull<std::string *> kernel_name) {
  64. const std::string strsrc = "/";
  65. const std::string strdst = "--";
  66. std::string::size_type pos = 0;
  67. std::string::size_type srclen = strsrc.size();
  68. std::string::size_type dstlen = strdst.size();
  69. while ((pos = kernel_name->find(strsrc, pos)) != std::string::npos) {
  70. kernel_name->replace(pos, srclen, strdst);
  71. pos += dstlen;
  72. }
  73. }
  74. /*
  75. * Feature group: Dump.
  76. * Target device group: Ascend, GPU and CPU.
  77. * Runtime category: Old runtime, MindRT.
  78. * Description: Get the actual tensor shape for dumping based on trans_flag option in configuration json file.
  79. */
  80. void GetDumpIntShape(const AnfNodePtr &node, size_t index, NotNull<ShapeVector *> int_shapes, bool trans_flag) {
  81. if (trans_flag) {
  82. *int_shapes = trans::GetRuntimePaddingShape(node, index);
  83. } else {
  84. auto shape = AnfAlgo::GetOutputDeviceShape(node, index);
  85. (void)std::transform(shape.begin(), shape.end(), std::back_inserter(*int_shapes),
  86. [](size_t inner_item) { return SizeToInt(inner_item); });
  87. }
  88. }
  89. const DeviceTensorPtr GetParameterInfo(const AnfNodePtr &node, NotNull<ShapeVector *> int_shapes,
  90. NotNull<TypeId *> host_type, NotNull<TypeId *> device_type) {
  91. const auto &device_tensors = DeviceTensorStore::GetInstance().Fetch(node.get());
  92. if (device_tensors.size() < 1) {
  93. return nullptr;
  94. }
  95. auto device_addr = device_tensors[0];
  96. MS_EXCEPTION_IF_NULL(device_addr);
  97. auto &dump_json_parser = DumpJsonParser::GetInstance();
  98. bool trans_flag = dump_json_parser.trans_flag();
  99. auto ref_node = device_addr->GetNodeIndex().first;
  100. MS_EXCEPTION_IF_NULL(ref_node);
  101. GetDumpIntShape(ref_node, PARAMETER_OUTPUT_INDEX, int_shapes, trans_flag);
  102. *host_type = AnfAlgo::GetOutputInferDataType(ref_node, PARAMETER_OUTPUT_INDEX);
  103. *device_type = AnfAlgo::GetOutputDeviceDataType(ref_node, PARAMETER_OUTPUT_INDEX);
  104. return device_addr;
  105. }
  106. /*
  107. * Feature group: Dump.
  108. * Target device group: Ascend, CPU.
  109. * Runtime category: Old runtime, MindRT.
  110. * Description: Dump the data in memory into file path.
  111. */
  112. void DumpMemToFile(const std::string &file_path, const device::DeviceAddress &addr, const ShapeVector &int_shapes,
  113. const TypeId &type, bool trans_flag) {
  114. auto format = kOpFormat_DEFAULT;
  115. auto ret = addr.DumpMemToFile(file_path, format, int_shapes, type, trans_flag);
  116. if (!ret) {
  117. MS_LOG(ERROR) << "DumpMemToFile Failed: flag:" << trans_flag << ", path:" << file_path << ", host_format:" << format
  118. << ".!";
  119. }
  120. }
  121. uint64_t GetTimeStamp() {
  122. auto cur_sys_time = std::chrono::system_clock::now();
  123. uint64_t timestamp = std::chrono::duration_cast<std::chrono::microseconds>(cur_sys_time.time_since_epoch()).count();
  124. return timestamp;
  125. }
  126. /*
  127. * Feature group: Dump.
  128. * Target device group: Ascend, GPU, CPU.
  129. * Runtime category: Old runtime, MindRT.
  130. * Description: Remove scope from operator name. The default separator is "--".
  131. */
  132. std::string GetOpNameWithoutScope(const std::string &fullname_with_scope, const std::string &separator) {
  133. std::size_t found = fullname_with_scope.rfind(separator);
  134. std::string op_name;
  135. if (found != std::string::npos) {
  136. op_name = fullname_with_scope.substr(found + separator.length());
  137. }
  138. return op_name;
  139. }
  140. /*
  141. * Feature group: Dump.
  142. * Target device group: Ascend, GPU, CPU.
  143. * Runtime category: Old runtime, MindRT.
  144. * Description: Dump string content into file path. Current purpose is to save operator overflow information in json
  145. * file in ascend a+m dump mode.
  146. */
  147. void DumpToFile(const std::string &file_name, const std::string &dump_str) {
  148. if (dump_str.empty()) {
  149. MS_LOG(ERROR) << "Failed to dump empty tensor data.";
  150. return;
  151. }
  152. auto real_path = Common::CreatePrefixPath(file_name);
  153. if (!real_path.has_value()) {
  154. MS_LOG(ERROR) << "CreatePrefixPath failed.";
  155. return;
  156. }
  157. std::string real_path_str = real_path.value();
  158. ChangeFileMode(real_path_str, S_IWUSR);
  159. std::ofstream file(real_path_str, std::ofstream::out | std::ofstream::trunc);
  160. if (!file.is_open()) {
  161. MS_LOG(EXCEPTION) << "Open file " << real_path_str << "failed: " << ErrnoToString(errno);
  162. }
  163. file << dump_str;
  164. if (file.bad()) {
  165. file.close();
  166. MS_LOG(EXCEPTION) << "Dump string to file " << real_path_str << " failed: " << ErrnoToString(errno);
  167. }
  168. file.close();
  169. ChangeFileMode(real_path_str, S_IRUSR);
  170. }
  171. } // namespace mindspore