You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

optime_parser.py 11 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. """Op compute time files parser."""
  16. import os
  17. import stat
  18. from mindspore.profiler.common.util import fwrite_format
  19. from mindspore.profiler.common.exceptions.exceptions import ProfilerFileNotFoundException, \
  20. ProfilerIOException
  21. from mindspore import log as logger
  22. from mindspore.profiler.common.validator.validate_path import validate_and_normalize_path
  23. from mindspore.profiler.parser.container import HWTSContainer
# Header row for the intermediate timeline file written by
# OPComputeTimeParser._write_timeline_data_into_file.
TIMELINE_FILE_COLUMN_TITLE = 'op_name, stream_id, start_time(ms), duration(ms)'
  25. class OPComputeTimeParser:
  26. """
  27. Join hwts info and framework info, get op time info, and output to the result file.
  28. Args:
  29. hwts_output_file (str): The file path of hwts_output_file. Such as: './output_format_data_hwts_0.txt".
  30. output_filename (str): The output data file path and name. Such as: './output_op_compute_time_0.txt'.
  31. op_task_info (dict): The task and op relation info. The format: {task_id, [opname, stream_id, block dim]}.
  32. """
  33. _dst_file_title = 'title:op compute time'
  34. _dst_file_column_title = 'op_name compute_time(ms) stream_id'
  35. _dst_file_column_title += '\n------------ --------------- ---------'
  36. def __init__(self, hwts_output_file, output_filename, op_task_info,
  37. output_path, device_id):
  38. hwts_output_file = validate_and_normalize_path(hwts_output_file)
  39. self._hwts_output_file = hwts_output_file
  40. self._output_filename = output_filename
  41. self._op_task_info = op_task_info
  42. self._output_path = output_path
  43. self._device_id = device_id
  44. self._min_cycle_counter = float("inf")
  45. def _get_op_task_id_map(self):
  46. """
  47. Read hwts data file, get the task time info.
  48. Returns:
  49. list: all hwts task time info.
  50. """
  51. op_map_result = []
  52. hwts_list = []
  53. if not os.path.exists(self._hwts_output_file):
  54. logger.error('The hwts output file does not exist.')
  55. raise ProfilerFileNotFoundException('hwts output file')
  56. with open(self._hwts_output_file, 'r') as data_file:
  57. lines = data_file.readlines()
  58. for line in lines:
  59. if line.startswith("Start of task") or line.startswith("End of task"):
  60. line_split = line.split()
  61. container = HWTSContainer(line_split)
  62. hwts_list.append(container)
  63. # hwts op map by taskId
  64. for hwts in hwts_list:
  65. if hwts.task_id in self._op_task_info.keys():
  66. hwts.op_name = self._op_task_info[hwts.task_id]
  67. op_map_result.append(hwts)
  68. return op_map_result
  69. def execute(self):
  70. """Execute the parser, compute all op, get op time, and write it to the output file."""
  71. # Calculate the execution time of operators,
  72. # and update the minimum cycle counter.
  73. tmp_result_data = self._calculate_op_execution_time()
  74. # Convert time units from nanoseconds to milliseconds.
  75. # The unit of the cycle counter is 10 nanoseconds.
  76. op_name_time_dict = {}
  77. op_name_stream_dict = {}
  78. op_name_count_dict = {}
  79. op_name_task_dict = {}
  80. op_name_start_time = {}
  81. self._convert_op_time_unit(
  82. tmp_result_data, op_name_time_dict, op_name_stream_dict,
  83. op_name_count_dict, op_name_task_dict, op_name_start_time
  84. )
  85. result_data = ""
  86. total_time = 0
  87. for op_name, time in op_name_time_dict.items():
  88. if op_name in op_name_stream_dict.keys():
  89. stream_id = op_name_stream_dict[op_name]
  90. avg_time = time / op_name_count_dict[op_name]
  91. total_time += avg_time
  92. result_data += ("%s %s %s\n" %(op_name, str(avg_time), stream_id))
  93. result_data += ("total op %s 0" %(str(total_time)))
  94. timeline_data = []
  95. for op_name, time in op_name_time_dict.items():
  96. if op_name in op_name_stream_dict.keys():
  97. stream_id = op_name_stream_dict[op_name]
  98. start_time_list = op_name_start_time.get(op_name)
  99. for (start_time, duration) in start_time_list:
  100. timeline_data.append([op_name, stream_id, start_time, duration])
  101. # Write the metadata of operators into the file,
  102. # including operator name, average time, and stream id.
  103. self._write_op_time_into_file(result_data)
  104. # Write the timeline data into file,
  105. # including operator name, stream id, start time, and duration.
  106. self._write_timeline_data_into_file(timeline_data)
  107. def _write_op_time_into_file(self, result_data):
  108. """
  109. Write the metadata of operators into the file, including
  110. op name, average time, and stream id.
  111. Args:
  112. result_data (str): The metadata to be written into the file.
  113. 'op_name_1', 'avg_time_1', 'stream_id_1',
  114. 'op_name_2', 'avg_time_2', 'stream_id_2',
  115. ...
  116. """
  117. fwrite_format(self._output_filename, data_source=self._dst_file_title, is_start=True)
  118. fwrite_format(self._output_filename, data_source=self._dst_file_column_title)
  119. fwrite_format(self._output_filename, data_source=result_data)
  120. def _write_timeline_data_into_file(self, timeline_data):
  121. """
  122. Write the timeline information into the file, including
  123. operator name, stream id, start time and duration.
  124. Args:
  125. timeline_data (list): The metadata to be written into the file.
  126. [
  127. ['op_name_1', 'stream_id_1', 'start_time_1', 'durarion_1'],
  128. ['op_name_2', 'stream_id_2', 'start_time_2', 'durarion_2'],
  129. [...]
  130. ]
  131. """
  132. # sorted by start times
  133. timeline_data.sort(key=lambda x: float(x[2]))
  134. filename = 'output_timeline_data_{}.txt'.format(self._device_id)
  135. file_path = os.path.join(self._output_path, filename)
  136. file_path = validate_and_normalize_path(file_path)
  137. # write to file
  138. try:
  139. with open(file_path, 'w') as f_obj:
  140. f_obj.write(TIMELINE_FILE_COLUMN_TITLE + '\n')
  141. for timeline in timeline_data:
  142. timeline = [str(item) for item in timeline]
  143. f_obj.write(','.join(timeline) + '\n')
  144. os.chmod(file_path, stat.S_IREAD | stat.S_IWRITE)
  145. except (IOError, OSError) as err:
  146. logger.error('Error occurred when writing intermediate timeline file: %s', err)
  147. raise ProfilerIOException
  148. def _calculate_op_execution_time(self):
  149. """
  150. Calculate the execution time of each operator.
  151. Returns:
  152. list, including the intermediate data of op execution time.
  153. """
  154. tmp_result_data = []
  155. op_map_list = self._get_op_task_id_map()
  156. cur_index = 0
  157. length = len(op_map_list)
  158. min_cycle_counter = float("inf")
  159. while cur_index < length:
  160. if cur_index + 1 == length:
  161. break
  162. op_start = op_map_list[cur_index]
  163. op_end = op_map_list[cur_index + 1]
  164. if op_start.status == "Start" and op_end.status == "End" \
  165. and op_start.op_name == op_end.op_name:
  166. op_start.duration = op_end.cycle_counter - op_start.cycle_counter
  167. tmp_result_data.append(op_start)
  168. cur_index += 2
  169. if not op_start.op_name.startswith("assign"):
  170. min_cycle_counter = min(min_cycle_counter, op_start.cycle_counter)
  171. else:
  172. cur_index += 1
  173. # Update the value of minimum cycle counter.
  174. self._min_cycle_counter = min_cycle_counter / 1e5 # Convert the time unit from 10ns to 1ms
  175. return tmp_result_data
  176. def _convert_op_time_unit(self, op_data_list, op_name_time_dict, op_name_stream_dict,
  177. op_name_count_dict, op_name_task_dict, op_name_start_time):
  178. """
  179. Calculate the execution time of operator and convert it into millisecond.
  180. Args:
  181. op_data_list (list): The list of operator metadata.
  182. op_name_time_dict (dict): The mapping relation of operator name and its execution time.
  183. op_name_stream_dict (dict): The mapping relation of operator name and its stream id.
  184. op_name_count_dict (dict): The mapping relation of operator name and its count.
  185. op_name_task_dict (dict): The mapping relation of operator name and its task id.
  186. op_name_start_time (dict): The mapping relation of operator name and its start time.
  187. """
  188. factor = 1e5
  189. for item in op_data_list:
  190. op_name = item.op_name
  191. # Unit conversion: converting the cycle counter into ms.
  192. op_start_time_str = str(item.cycle_counter / factor)
  193. op_duration = item.duration / factor
  194. op_duration_str = str(item.duration / factor)
  195. if op_name in op_name_time_dict.keys():
  196. op_name_time_dict[op_name] += op_duration
  197. if item.task_id == op_name_task_dict[op_name]:
  198. op_name_count_dict[op_name] += 1
  199. op_name_start_time[op_name].append(
  200. (op_start_time_str, op_duration_str)
  201. )
  202. else:
  203. op_name_time_dict[op_name] = op_duration
  204. op_name_stream_dict[op_name] = item.stream_id
  205. op_name_task_dict[op_name] = item.task_id
  206. op_name_count_dict[op_name] = 1
  207. op_name_start_time[op_name] = []
  208. op_name_start_time[op_name].append(
  209. (op_start_time_str, op_duration_str)
  210. )
  211. @property
  212. def min_cycle_counter(self):
  213. """Get minimum cycle counter."""
  214. return self._min_cycle_counter