
analyser.py 12 kB

# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The specific analyser class."""
import csv
import json
import os

from mindinsight.profiler.analyser.base_analyser import BaseAnalyser
from mindinsight.profiler.common.log import logger


class AicoreTypeAnalyser(BaseAnalyser):
    """
    The analyser for analyzing the AICORE operator types.

    Args:
        profiling_dir (str): The directory where the parsed profiling files are
            located.
        device_id (str): The device ID.

    Raises:
        ProfilerPathErrorException: If the profiling dir is invalid.
    """
    _col_names = ['op_type', 'execution_time', 'execution_frequency', 'percent']
    _col_names_in_result = ['op_type', 'execution_time (ms)',
                            'execution_frequency', 'percent']
    _file_name_aicore_type_time = 'aicore_intermediate_{}_type.csv'

    def _load(self):
        """Load data according to the parsed AICORE operator types file."""
        op_type_file_path = os.path.join(
            self._profiling_dir,
            self._file_name_aicore_type_time.format(self._device_id)
        )
        if not os.path.isfile(op_type_file_path):
            logger.warning('The file <%s> does not exist.', op_type_file_path)
            return

        with open(op_type_file_path, 'r') as file:
            csv_reader = csv.reader(file)
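            # Skip the header row of the CSV file before reading data rows.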
            _ = next(csv_reader)
            for info in csv_reader:
                self._data.append(self._convert_field_type(info))

    def _filter(self, filter_condition):
        """
        Filter the profiling data according to the filter condition.

        Args:
            filter_condition (dict): The filter condition.
        """
        def _inner_filter(item: list):
            return self._default_filter(item, filter_condition)

        self._result = list(filter(_inner_filter, self._data))

    def _organize_query_result(self):
        """
        Organize the query result.

        Returns:
            dict, the query result.
        """
        for item in self._result:
            item[1] = float(format(item[1], '.6f'))
        self._display_col_names = self._col_names_in_result[:]
        return super()._organize_query_result()

    def _convert_field_type(self, row):
        """
        Convert the field type to the specific type.

        Args:
            row (list[str]): One row data from parsed data.

        Returns:
            list[Union[str, float]], the converted data.
        """
        return [row[0], float(row[1]), int(row[2]), float(row[3])]


class AicoreDetailAnalyser(BaseAnalyser):
    """
    The analyser for analyzing all the AICORE operators.

    Args:
        profiling_dir (str): The directory where the parsed profiling files are
            located.
        device_id (str): The device ID.

    Raises:
        ProfilerPathErrorException: If the profiling dir is invalid.
    """
    _col_names = ['op_name', 'op_type', 'avg_execution_time', 'subgraph',
                  'full_op_name', 'op_info']
    _col_names_in_result = ['op_name', 'op_type', 'avg_execution_time (ms)',
                            'subgraph', 'full_op_name', 'op_info']
    _file_name_aicore_detail_time = 'aicore_intermediate_{}_detail.csv'
    _file_name_framework_info = 'framework_raw_{}.csv'

    def __init__(self, profiling_dir, device_id):
        super().__init__(profiling_dir, device_id)
        self._none_filter_condition_key = [
            'is_display_detail', 'is_display_full_op_name'
        ]
        self._none_sort_col_names = ['op_info']

    def query_and_sort_by_op_type(self, filter_condition, op_type_order: list):
        """
        Query the AICORE operator detail information by `filter_condition`,
        and sort by `op_type_order` and execution time.

        Args:
            filter_condition (dict): The filter condition.
            op_type_order (list[str]): The operator type names, in display order.

        Returns:
            dict, the filtered and sorted results.
        """
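        # Group the filtered rows by operator type, then emit the groups in the
        # order given by `op_type_order`, each group sorted by average execution
        # time in descending order.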
        if filter_condition is None:
            filter_condition = {}
        self._filter(filter_condition)

        type_detail_cache = {}
        for detail_info in self._result:
            op_type = detail_info[1]
            if op_type not in op_type_order:
                continue
            infos = type_detail_cache.get(op_type)
            if infos:
                infos.append(detail_info)
            else:
                type_detail_cache[op_type] = [detail_info]

        result = []
        for op_type in op_type_order:
            detail_infos = type_detail_cache.get(op_type)
            if detail_infos is None:
                continue
            detail_infos.sort(key=lambda item: item[2], reverse=True)
            result.extend(detail_infos)

        return {
            'col_name': self._display_col_names,
            'object': result
        }

    def _load(self):
        """Load data according to the parsed AICORE operator file."""
        op_detail_file_path = os.path.join(
            self._profiling_dir,
            self._file_name_aicore_detail_time.format(self._device_id)
        )
        framework_file_path = os.path.join(
            self._profiling_dir,
            self._file_name_framework_info.format(self._device_id)
        )
        if not os.path.isfile(op_detail_file_path):
            logger.warning('The file <%s> does not exist.', op_detail_file_path)
            return
        if not os.path.isfile(framework_file_path):
            logger.warning('The file <%s> does not exist.', framework_file_path)
            return

        framework_infos = dict()
        with open(framework_file_path, 'r') as file:
            csv_reader = csv.reader(file)
            _ = next(csv_reader)
            for info in csv_reader:
                framework_infos[info[3]] = self._convert_framework_field_type(
                    info
                )

        with open(op_detail_file_path, 'r') as file:
            csv_reader = csv.reader(file)
            _ = next(csv_reader)
            for info in csv_reader:
                detail_info = self._get_op_detail_info(info, framework_infos)
                self._data.append(detail_info)

        del framework_infos

    def _filter(self, filter_condition):
        """
        Filter the profiling data according to the filter condition.

        Args:
            filter_condition (dict): The filter condition.
        """
        def _inner_filter(item: list):
            return self._default_filter(item, filter_condition)

        def _inner_map(item: list):
            inner_item = item[0:4]
            if is_display_full_op_name:
                inner_item.append(item[4])
            if is_display_detail:
                inner_item.append(item[5])
            return inner_item

        is_display_detail = filter_condition.get('is_display_detail', True)
        is_display_full_op_name = filter_condition.get(
            'is_display_full_op_name', True
        )
        self._set_display_col_name(is_display_detail, is_display_full_op_name)
        if is_display_detail and is_display_full_op_name:
            self._result = list(filter(_inner_filter, self._data))
        else:
            self._result = list(
                map(_inner_map, filter(_inner_filter, self._data))
            )

    def _set_display_col_name(self, is_display_detail, is_display_full_op_name):
        """
        Set the display column name according to the filter condition.

        Args:
            is_display_detail (bool): Whether to display the detailed operator
                information.
            is_display_full_op_name (bool): Whether to display the operator full
                name.
        """
        self._display_col_names = self._col_names_in_result[0:4]
        if is_display_full_op_name:
            self._display_col_names.append(self._col_names_in_result[4])
        if is_display_detail:
            self._display_col_names.append(self._col_names_in_result[5])

    def _convert_framework_field_type(self, row):
        """
        Convert the field type of framework file to the specific type.

        Args:
            row (list[str]): One row data from parsed data.

        Returns:
            list[Union[str, float]], the converted data.
        """
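        # Keep only the framework fields consumed by _get_op_detail_info:
        # full_op_name, op_name, op_type, subgraph and the JSON-decoded op_info
        # (column order inferred from the indexing used there).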
        return [row[3], row[4], row[5], row[6],
                json.loads(row[7]) if row[7] else None]

    def _get_op_detail_info(self, row, framework_infos):
        """
        Get operator detail information.

        Args:
            row (list[str]): One row data from parsed operator file.
            framework_infos (dict): All framework information.

        Returns:
            list[Union[str, float]], the operator detail information in one row.
        """
        framework_info = framework_infos.get(row[0])
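        # Reorder the merged fields into the _col_names layout:
        # [op_name, op_type, avg_execution_time, subgraph, full_op_name, op_info].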
        return [framework_info[1], framework_info[2], float(row[1]),
                framework_info[3], framework_info[0], framework_info[4]]


class AicpuAnalyser(BaseAnalyser):
    """
    The analyser for analyzing all the AICPU operators.

    Args:
        profiling_dir (str): The directory where the parsed profiling files are
            located.
        device_id (str): The device ID.

    Raises:
        ProfilerPathErrorException: If the profiling dir is invalid.
    """
    _col_names = ['serial_number', 'op_type', 'total_time', 'dispatch_time',
                  'run_start', 'run_end']
    _col_names_in_result = ['serial_number', 'op_type', 'total_time (ms)',
                            'dispatch_time (ms)', 'run_start', 'run_end']
    _file_name_aicpu_time = 'aicpu_intermediate_{}.csv'

    def _load(self):
        """Load data according to the parsed AICPU operator file."""
        aicpu_file_path = os.path.join(
            self._profiling_dir,
            self._file_name_aicpu_time.format(self._device_id)
        )
        if not os.path.isfile(aicpu_file_path):
            logger.warning('The file <%s> does not exist.', aicpu_file_path)
            return

        with open(aicpu_file_path, 'r') as file:
            csv_reader = csv.reader(file)
            _ = next(csv_reader)
            for info in csv_reader:
                aicpu_info = self._convert_field_type(info)
                self._data.append(aicpu_info)

    def _filter(self, filter_condition):
        """
        Filter the profiling data according to the filter condition.

        Args:
            filter_condition (dict): The filter condition.
        """
        def _inner_filter(item: list):
            return self._default_filter(item, filter_condition)

        self._result = list(filter(_inner_filter, self._data))

    def _organize_query_result(self):
        """
        Organize the query result.

        Returns:
            dict, the query result.
        """
        self._display_col_names = self._col_names_in_result[:]
        return super()._organize_query_result()

    def _convert_field_type(self, row):
        """
        Convert the field type to the specific type.

        Args:
            row (list[str]): One row data from parsed data.

        Returns:
            list[Union[str, float]], the converted data.
        """
        return [int(row[0]), row[1], float(row[2]), float(row[3]), int(row[4]),
                int(row[5])]
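# ---------------------------------------------------------------------------
# Usage sketch (not part of the original file): a minimal illustration of how
# these analysers might be driven directly, assuming the parsed CSV files
# (for example aicore_intermediate_0_detail.csv and framework_raw_0.csv)
# already exist under a hypothetical ./profiler_output directory for device
# "0". The generic query entry point lives in BaseAnalyser, so only the
# method defined in this file is exercised; the operator type names below are
# placeholders.
if __name__ == '__main__':
    analyser = AicoreDetailAnalyser('./profiler_output', '0')
    result = analyser.query_and_sort_by_op_type(
        {'is_display_detail': False, 'is_display_full_op_name': False},
        ['MatMul', 'Conv2D']
    )
    print(result['col_name'])
    for row in result['object']:
        print(row)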