You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

analyser.py 11 kB

(line-number gutter 1–300 from the original code listing)
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. """The specific analyser class."""
  16. import csv
  17. import json
  18. import os
  19. from mindinsight.profiler.analyser.base_analyser import BaseAnalyser
  20. from mindinsight.profiler.common.log import logger
  21. class AicoreTypeAnalyser(BaseAnalyser):
  22. """
  23. The analyser for analyzing the AICORE operator types.
  24. Args:
  25. profiling_dir (str): The directory where the parsed profiling files are
  26. located.
  27. device_id (str): The device ID.
  28. """
  29. __col_names__ = ['op_type', 'execution_time', 'execution_frequency',
  30. 'percent']
  31. _file_name_aicore_type_time = 'aicore_intermediate_{}_type.csv'
  32. def _load(self):
  33. """Load data according to the parsed AICORE operator types file."""
  34. op_type_file_path = os.path.join(
  35. self._profiling_dir,
  36. self._file_name_aicore_type_time.format(self._device_id)
  37. )
  38. if not os.path.isfile(op_type_file_path):
  39. logger.warning('The file <%s> does not exist.', op_type_file_path)
  40. return
  41. with open(op_type_file_path, 'r') as file:
  42. csv_reader = csv.reader(file)
  43. _ = next(csv_reader)
  44. for info in csv_reader:
  45. self._data.append(self._convert_field_type(info))
  46. def _filter(self, filter_condition):
  47. """
  48. Filter the profiling data according to the filter condition.
  49. Args:
  50. filter_condition (dict): The filter condition.
  51. """
  52. def _inner_filter(item: list):
  53. return self._default_filter(item, filter_condition)
  54. self._result = list(filter(_inner_filter, self._data))
  55. def _convert_field_type(self, row):
  56. """
  57. Convert the field type to the specific type.
  58. Args:
  59. row (list[str]): One row data from parsed data.
  60. Returns:
  61. list[Union[str, float]], the converted data.
  62. """
  63. return [row[0], float(row[1]), int(row[2]), float(row[3])]
  64. class AicoreDetailAnalyser(BaseAnalyser):
  65. """
  66. The analyser for analyzing all the AICORE operators.
  67. Args:
  68. profiling_dir (str): The directory where the parsed profiling files are
  69. located.
  70. device_id (str): The device ID.
  71. """
  72. __col_names__ = ['op_name', 'op_type', 'execution_time', 'subgraph',
  73. 'full_op_name', 'op_info']
  74. _file_name_aicore_detail_time = 'aicore_intermediate_{}_detail.csv'
  75. _file_name_framework_info = 'framework_raw_{}.csv'
  76. def __init__(self, profiling_dir, device_id):
  77. super().__init__(profiling_dir, device_id)
  78. self._none_filter_condition_key = [
  79. 'is_display_detail', 'is_display_full_op_name'
  80. ]
  81. def query_and_sort_by_op_type(self, filter_condition, op_type_order: list):
  82. """
  83. Query the AICORE operator detail information by `filter_condition`,
  84. and sort by `op_type_order` and execution time.
  85. Args:
  86. filter_condition (dict): The filter condition.
  87. op_type_order (list[str]): The name of the operator type in order.
  88. Returns:
  89. dict, The results are filtered and sorted.
  90. """
  91. if filter_condition is None:
  92. filter_condition = {}
  93. self._filter(filter_condition)
  94. type_detail_cache = {}
  95. for detail_info in self._result:
  96. op_type = detail_info[1]
  97. if op_type not in op_type_order:
  98. continue
  99. infos = type_detail_cache.get(op_type)
  100. if infos:
  101. infos.append(detail_info)
  102. else:
  103. type_detail_cache[op_type] = [detail_info]
  104. result = []
  105. for op_type in op_type_order:
  106. detail_infos = type_detail_cache.get(op_type)
  107. if detail_infos is None:
  108. continue
  109. detail_infos.sort(key=lambda item: item[2], reverse=True)
  110. result.extend(detail_infos)
  111. return {
  112. 'col_name': self._display_col_names,
  113. 'object': result
  114. }
  115. def _load(self):
  116. """Load data according to the parsed AICORE operator file."""
  117. op_detail_file_path = os.path.join(
  118. self._profiling_dir,
  119. self._file_name_aicore_detail_time.format(self._device_id)
  120. )
  121. framework_file_path = os.path.join(
  122. self._profiling_dir,
  123. self._file_name_framework_info.format(self._device_id)
  124. )
  125. if not os.path.isfile(op_detail_file_path):
  126. logger.warning('The file <%s> does not exist.', op_detail_file_path)
  127. return
  128. if not os.path.isfile(framework_file_path):
  129. logger.warning('The file <%s> does not exist.', framework_file_path)
  130. return
  131. framework_infos = dict()
  132. with open(framework_file_path, 'r') as file:
  133. csv_reader = csv.reader(file)
  134. _ = next(csv_reader)
  135. for info in csv_reader:
  136. framework_infos[info[3]] = self._convert_framework_field_type(
  137. info
  138. )
  139. with open(op_detail_file_path, 'r') as file:
  140. csv_reader = csv.reader(file)
  141. _ = next(csv_reader)
  142. for info in csv_reader:
  143. detail_info = self._get_op_detail_info(info, framework_infos)
  144. self._data.append(detail_info)
  145. del framework_infos
  146. def _filter(self, filter_condition):
  147. """
  148. Filter the profiling data according to the filter condition.
  149. Args:
  150. filter_condition (dict): The filter condition.
  151. """
  152. def _inner_filter(item: list):
  153. return self._default_filter(item, filter_condition)
  154. def _inner_map(item: list):
  155. inner_item = item[0:4]
  156. if is_display_full_op_name:
  157. inner_item.append(item[4])
  158. if is_display_detail:
  159. inner_item.append(item[5])
  160. return inner_item
  161. is_display_detail = filter_condition.get('is_display_detail', True)
  162. is_display_full_op_name = filter_condition.get(
  163. 'is_display_full_op_name', True
  164. )
  165. self._set_display_col_name(is_display_detail, is_display_full_op_name)
  166. if is_display_detail and is_display_full_op_name:
  167. self._result = list(filter(_inner_filter, self._data))
  168. else:
  169. self._result = list(
  170. map(_inner_map, filter(_inner_filter, self._data))
  171. )
  172. def _set_display_col_name(self, is_display_detail, is_display_full_op_name):
  173. """
  174. Set the display column name according to the filter condition.
  175. Args:
  176. is_display_detail (bool): Whether to display the detailed operator
  177. information.
  178. is_display_full_op_name (bool): Whether to display the operator full
  179. name.
  180. """
  181. self._display_col_names = self.__col_names__[0:4]
  182. if is_display_full_op_name:
  183. self._display_col_names.append(self.__col_names__[4])
  184. if is_display_detail:
  185. self._display_col_names.append(self.__col_names__[5])
  186. def _convert_framework_field_type(self, row):
  187. """
  188. Convert the field type of framework file to the specific type.
  189. Args:
  190. row (list[str]): One row data from parsed data.
  191. Returns:
  192. list[Union[str, float]], the converted data.
  193. """
  194. return [row[3], row[4], row[5], row[6],
  195. json.loads(row[7]) if row[7] else None]
  196. def _get_op_detail_info(self, row, framework_infos):
  197. """
  198. Get operator detail information.
  199. Args:
  200. row (list[str]): One row data from parsed operator file.
  201. framework_infos (dict): All framework information.
  202. Returns:
  203. list[Union[str, float]], the operator detail information in one row.
  204. """
  205. framework_info = framework_infos.get(row[0])
  206. return [framework_info[1], framework_info[2], float(row[1]),
  207. framework_info[3], framework_info[0], framework_info[4]]
  208. class AicpuAnalyser(BaseAnalyser):
  209. """
  210. The analyser for analyzing all the AICPU operators.
  211. Args:
  212. profiling_dir (str): The directory where the parsed profiling files are
  213. located.
  214. device_id (str): The device ID.
  215. """
  216. __col_names__ = ['serial_number', 'op_name', 'total_time', 'dispatch_time',
  217. 'RunV2_start', 'compute_start', 'memcpy_start',
  218. 'memcpy_end', 'RunV2_end']
  219. _file_name_aicpu_time = 'aicpu_intermediate_{}.csv'
  220. def _load(self):
  221. """Load data according to the parsed AICPU operator file."""
  222. aicpu_file_path = os.path.join(
  223. self._profiling_dir,
  224. self._file_name_aicpu_time.format(self._device_id)
  225. )
  226. if not os.path.isfile(aicpu_file_path):
  227. logger.warning('The file <%s> does not exist.', aicpu_file_path)
  228. return
  229. with open(aicpu_file_path, 'r') as file:
  230. csv_reader = csv.reader(file)
  231. _ = next(csv_reader)
  232. for info in csv_reader:
  233. aicpu_info = self._convert_field_type(info)
  234. self._data.append(aicpu_info)
  235. def _filter(self, filter_condition):
  236. """
  237. Filter the profiling data according to the filter condition.
  238. Args:
  239. filter_condition (dict): The filter condition.
  240. """
  241. def _inner_filter(item: list):
  242. return self._default_filter(item, filter_condition)
  243. self._result = list(filter(_inner_filter, self._data))
  244. def _convert_field_type(self, row):
  245. """
  246. Convert the field type to the specific type.
  247. Args:
  248. row (list[str]): One row data from parsed data.
  249. Returns:
  250. list[Union[str, float]], the converted data.
  251. """
  252. return [int(row[0]), row[1], float(row[2]), float(row[3]), int(row[4]),
  253. int(row[5]), int(row[6]), int(row[7]), int(row[8])]