
recommender.py 20 kB

# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
  15. """
  16. Predefined watchpoints.
  17. This module predefine recommend watchpoints.
  18. """

import math
import queue as Queue

from mindinsight.debugger.conditionmgr.conditionmgr import ConditionMgr
from mindinsight.debugger.conditionmgr.condition import TargetTypeEnum
from mindinsight.debugger.conditionmgr.condition import ConditionIdEnum
from mindinsight.debugger.conditionmgr.condition import ActivationFuncEnum
from mindinsight.debugger.conditionmgr.common.utils import NodeBasicInfo
from mindinsight.debugger.conditionmgr.log import logger
from mindinsight.conf import settings
from mindinsight.debugger.stream_cache.watchpoint import WatchNodeTree


class _WatchPointData:
    """
    WatchPoint data container.

    Args:
        watch_condition (dict): The dict of watch conditions.
        watch_nodes (list[NodeBasicInfo]): The list of node basic info.
        name (str): The name of the watchpoint.
    """
    def __init__(self, watch_condition, watch_nodes, name):
        self.watch_condition = watch_condition
        self.watch_nodes = watch_nodes
        self.name = name

    def get_watch_condition_dict(self):
        return {
            "id": self.watch_condition.get("condition"),
            "params": [{
                "name": param.get_parameter_name(),
                "value": param.value
            } for param in self.watch_condition.get("params")]
        }


class _ConditionParameterValue:
    """Condition parameter data container."""
    def __init__(self, parameter, value):
        self.parameter = parameter
        self.value = value

    def get_parameter_name(self):
        return self.parameter.name
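

# Shape of the serialized condition (illustrative sketch only): a _WatchPointData whose params hold a
# single _ConditionParameterValue for, say, "zero_percentage_ge" with value 100 would serialize via
# get_watch_condition_dict() to:
#
#   {
#       "id": <condition id>,
#       "params": [{"name": "zero_percentage_ge", "value": 100}]
#   }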


def recommend_watchpoints(condition_mgr: ConditionMgr, graph_stream, condition_context):
    """
    Recommend watchpoints.

    Args:
        condition_mgr (ConditionMgr): Condition manager instance.
        graph_stream (GraphHandler): Graph handler instance.
        condition_context (ConditionContext): Context for condition.

    Returns:
        list[WatchPointData], watch points to be created.
    """
    watch_points = []
    if not graph_stream.graph:
        logger.warning("Given graph is None.")
        return watch_points

    if not settings.ENABLE_RECOMMENDED_WATCHPOINTS:
        return watch_points

    # add weight watch points
    merged_info = get_basic_node_info(TargetTypeEnum.WEIGHT.value, graph_stream)
    _recommend_weight_initialization(merged_info, condition_mgr, watch_points, condition_context)
    _recommend_weight_change_too_large(merged_info, condition_mgr, watch_points, condition_context)

    # Because we cannot identify trainable weights currently, weight_no_change and weight_change_too_small
    # will not be recommended.
    trainable_weight_nodes = []
    _recommend_weight_not_changed(condition_mgr, trainable_weight_nodes, watch_points, condition_context)
    _recommend_weight_change_too_small(condition_mgr, trainable_weight_nodes, watch_points, condition_context)

    # add gradient watch points
    merged_info = get_basic_node_info(TargetTypeEnum.GRADIENT.value, graph_stream)
    _recommend_gradient_vanishing(merged_info, condition_mgr, watch_points, condition_context)

    # add tensor watch points
    merged_info = get_basic_node_info(TargetTypeEnum.TENSOR.value, graph_stream)
    _recommend_operator_overflow(merged_info, condition_mgr, watch_points, condition_context)
    _recommend_tensor_overflow(merged_info, condition_mgr, watch_points, condition_context)
    _recommend_tensor_all_zero(merged_info, condition_mgr, watch_points, condition_context)

    # add activation watch points
    merged_info = get_basic_node_info(TargetTypeEnum.ACTIVATION.value, graph_stream, ActivationFuncEnum.TANH.value)
    _recommend_activation_range(merged_info, condition_mgr, watch_points, condition_context,
                                ActivationFuncEnum.TANH.value)
    merged_info = get_basic_node_info(TargetTypeEnum.ACTIVATION.value, graph_stream, ActivationFuncEnum.SIGMOID.value)
    _recommend_activation_range(merged_info, condition_mgr, watch_points, condition_context,
                                ActivationFuncEnum.SIGMOID.value)
    merged_info = get_basic_node_info(TargetTypeEnum.ACTIVATION.value, graph_stream,
                                      [ActivationFuncEnum.RELU.value, ActivationFuncEnum.RELUV2.value])
    _recommend_activation_range(merged_info, condition_mgr, watch_points, condition_context,
                                ActivationFuncEnum.RELU.value)
    return watch_points
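

# A minimal usage sketch, kept as a comment so nothing runs on import. It assumes a debugger
# session has already provided `condition_mgr` (ConditionMgr), `graph_stream` (GraphHandler)
# and `condition_context` (ConditionContext); none of them is constructed here.
#
#   watch_points = recommend_watchpoints(condition_mgr, graph_stream, condition_context)
#   for watch_point in watch_points:
#       print(watch_point.name, watch_point.get_watch_condition_dict())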


def _recommend_tensor_all_zero(basic_info_nodes, condition_mgr, watch_points, condition_context):
    """Recommend tensor all zero watchpoint."""
    if not basic_info_nodes:
        return
    if not condition_mgr.has_condition(ConditionIdEnum.TENSOR_ALL_ZERO.value, condition_context):
        return
    condition = condition_mgr.get_condition(condition_id=ConditionIdEnum.TENSOR_ALL_ZERO.value)
    tensor_all_zero_watchpoint = _WatchPointData(
        watch_condition={
            "condition": condition.id,
            "params": [_ConditionParameterValue(
                parameter=condition.get_parameter_definition("zero_percentage_ge"),
                value=100  # set default value to 100
            )]
        },
        watch_nodes=basic_info_nodes.copy(),
        name='recommend_tensor_all_zero_watchpoint'
    )
    watch_points.append(tensor_all_zero_watchpoint)


def _recommend_tensor_overflow(basic_info_nodes, condition_mgr, watch_points, condition_context):
    """Recommend tensor general overflow watchpoint."""
    if not basic_info_nodes:
        return
    if not condition_mgr.has_condition(ConditionIdEnum.TENSOR_OVERFLOW.value, condition_context):
        return
    condition = condition_mgr.get_condition(condition_id=ConditionIdEnum.TENSOR_OVERFLOW.value)
    overflow_watchpoint = _WatchPointData(
        watch_condition={
            "condition": condition.id,
            "params": []
        },
        watch_nodes=basic_info_nodes.copy(),
        name='recommend_tensor_overflow_watchpoint'
    )
    watch_points.append(overflow_watchpoint)


def _recommend_operator_overflow(basic_info_nodes, condition_mgr, watch_points, condition_context):
    """Recommend operator overflow watchpoint."""
    if not basic_info_nodes:
        return
    if not condition_mgr.has_condition(ConditionIdEnum.OPERATOR_OVERFLOW.value, condition_context):
        return
    condition = condition_mgr.get_condition(condition_id=ConditionIdEnum.OPERATOR_OVERFLOW.value)
    overflow_d_watchpoint = _WatchPointData(
        watch_condition={
            "condition": condition.id,
            "params": []
        },
        watch_nodes=basic_info_nodes.copy(),
        name='recommend_operator_overflow_watchpoint'
    )
    watch_points.append(overflow_d_watchpoint)


def _recommend_gradient_vanishing(basic_info_nodes, condition_mgr, watch_points, condition_context):
    """Recommend gradient vanishing watchpoint."""
    if not basic_info_nodes:
        return
    if not condition_mgr.has_condition(ConditionIdEnum.GRADIENT_VANISHING.value, condition_context):
        return
    condition = condition_mgr.get_condition(condition_id=ConditionIdEnum.GRADIENT_VANISHING.value)
    gradient_vanishing_watchpoint = _WatchPointData(
        watch_condition={
            "condition": condition.id,
            "params": [_ConditionParameterValue(
                parameter=condition.get_parameter_definition("abs_mean_lt"),
                value=1e-9  # set default value to 1e-9
            )]
        },
        watch_nodes=basic_info_nodes.copy(),
        name='recommend_gradient_vanishing_watchpoint'
    )
    watch_points.append(gradient_vanishing_watchpoint)


def _recommend_weight_change_too_small(condition_mgr, trainable_weight_nodes, watch_points, condition_context):
    """Recommend weight change too small watchpoint."""
    if not trainable_weight_nodes:
        return
    if not condition_mgr.has_condition(ConditionIdEnum.WEIGHT_CHANGE_TOO_SMALL.value, condition_context):
        return
    condition = condition_mgr.get_condition(condition_id=ConditionIdEnum.WEIGHT_CHANGE_TOO_SMALL.value)
    weight_change_too_small_watchpoint = _WatchPointData(
        watch_condition={
            "condition": condition.id,
            "params": [
                _ConditionParameterValue(
                    parameter=condition.get_parameter_definition("abs_mean_update_ratio_lt"),
                    value=1.0e-4  # set default value to 1.0e-4
                ),
            ]
        },
        watch_nodes=trainable_weight_nodes,
        name='recommend_weight_change_too_small_watchpoint'
    )
    watch_points.append(weight_change_too_small_watchpoint)


def _recommend_weight_not_changed(condition_mgr, trainable_weight_nodes, watch_points, condition_context):
    """Recommend weight not changed watchpoint."""
    if not trainable_weight_nodes:
        return
    if not condition_mgr.has_condition(ConditionIdEnum.WEIGHT_NOT_CHANGED.value, condition_context):
        return
    condition = condition_mgr.get_condition(condition_id=ConditionIdEnum.WEIGHT_NOT_CHANGED.value)
    weight_no_change_watchpoint = _WatchPointData(
        watch_condition={
            "condition": condition.id,
            "params": [
                _ConditionParameterValue(
                    parameter=condition.get_parameter_definition("rtol"),
                    value=1.0e-5  # set default value to 1.0e-5
                ),
                _ConditionParameterValue(
                    parameter=condition.get_parameter_definition("atol"),
                    value=1.0e-8  # set default value to 1.0e-8
                ),
            ]
        },
        watch_nodes=trainable_weight_nodes,
        name='recommend_weight_not_changed_watchpoint'
    )
    watch_points.append(weight_no_change_watchpoint)


def _recommend_weight_change_too_large(basic_info_nodes, condition_mgr, watch_points, condition_context):
    """Recommend weight change too large watchpoint."""
    if not basic_info_nodes:
        return
    if not condition_mgr.has_condition(ConditionIdEnum.WEIGHT_CHANGE_TOO_LARGE.value, condition_context):
        return
    condition = condition_mgr.get_condition(condition_id=ConditionIdEnum.WEIGHT_CHANGE_TOO_LARGE.value)
    weight_change_too_large_watchpoint = _WatchPointData(
        watch_condition={
            "condition": condition.id,
            "params": [_ConditionParameterValue(
                parameter=condition.get_parameter_definition("abs_mean_update_ratio_gt"),
                value=1  # set default value to 1
            )]
        },
        watch_nodes=basic_info_nodes.copy(),
        name='recommend_weight_change_too_large_watchpoint'
    )
    watch_points.append(weight_change_too_large_watchpoint)


def _recommend_weight_initialization(basic_info_nodes, condition_mgr, watch_points, condition_context):
    """Recommend weight initialization watchpoint."""
    if not basic_info_nodes:
        return
    if not condition_mgr.has_condition(ConditionIdEnum.WEIGHT_INITIALIZATION.value, condition_context):
        return
    condition = condition_mgr.get_condition(condition_id=ConditionIdEnum.WEIGHT_INITIALIZATION.value)
    weight_initialization_watchpoint = _WatchPointData(
        watch_condition={
            "condition": condition.id,
            "params": [_ConditionParameterValue(
                parameter=condition.get_parameter_definition("zero_percentage_ge"),
                value=100  # set default value to 100
            )]
        },
        watch_nodes=basic_info_nodes.copy(),
        name='recommend_weight_initialization_watchpoint'
    )
    watch_points.append(weight_initialization_watchpoint)


def _recommend_activation_range(basic_info_nodes, condition_mgr, watch_points, condition_context, activation_func):
    """Recommend activation range watchpoint."""
    if not basic_info_nodes:
        return
    if not condition_mgr.has_condition(ConditionIdEnum.ACTIVATION_RANGE.value, condition_context):
        return
    condition = condition_mgr.get_condition(condition_id=ConditionIdEnum.ACTIVATION_RANGE.value)
    params = _get_recommend_activation_params(condition, activation_func)
    activation_range_watchpoint = _WatchPointData(
        watch_condition={
            "condition": condition.id,
            "params": params
        },
        watch_nodes=basic_info_nodes.copy(),
        name='recommend_{}_activation_range_watchpoint'.format(activation_func.lower())
    )
    watch_points.append(activation_range_watchpoint)


def get_basic_node_info(node_category, graph_stream, activation_func=None):
    """Get node merged info."""
    basic_info_nodes = _get_basic_node_info_by_node_category(node_category, graph_stream, activation_func)
    merged_info = _merge_nodes(basic_info_nodes, graph_stream.whole_graph)
    merged_info = _add_graph_name(merged_info, graph_stream)
    return merged_info


def _get_basic_node_info_by_node_category(node_category, graph_stream, activation_func=None):
    """Get node basic info by node category."""
    pattern = {'node_category': node_category}
    if activation_func:
        pattern['condition'] = {'activation_func': activation_func}
    all_graph_nodes = graph_stream.search_in_graph(pattern)
    return all_graph_nodes
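

# For example (illustrative only), the weight recommendations search the graph with a pattern
# such as {'node_category': TargetTypeEnum.WEIGHT.value}, while the activation recommendations
# additionally constrain the activation function, e.g.
# {'node_category': TargetTypeEnum.ACTIVATION.value,
#  'condition': {'activation_func': ActivationFuncEnum.TANH.value}}.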


def _convert_tree_to_node_list(node_tree, node_list):
    """Convert WatchNodeTree to Node list."""
    if node_tree.watch_status in [WatchNodeTree.NOT_WATCH, WatchNodeTree.INVALID]:
        logger.debug("The watch_status of node: %s is not_watch or invalid.", node_tree.node_name)
        return
    if node_tree.watch_status == WatchNodeTree.TOTAL_WATCH:
        node_basic_info = NodeBasicInfo(name=node_tree.node_name, full_name=node_tree.full_name,
                                        type=node_tree.node_type)
        node_list.append(node_basic_info)
        return
    if node_tree.watch_status == WatchNodeTree.PARTIAL_WATCH:
        for _, sub_tree in node_tree.get_children():
            _convert_tree_to_node_list(sub_tree, node_list)


def _update_watch_status(node_tree, graph):
    """
    Update the watch_status.

    If all sub_nodes of a WatchNodeTree are total_watch, the WatchNodeTree is changed to total_watch status.
    """
    tmp_node_queue = Queue.Queue()
    tmp_node_queue.put(node_tree)
    # watch node list in layer order
    watch_tree_list = []
    while not tmp_node_queue.empty():
        cur_tree = tmp_node_queue.get()
        watch_tree_list.append(cur_tree)
        for _, sub_tree in cur_tree.get_children():
            tmp_node_queue.put(sub_tree)

    # update the watch_status from bottom to top
    while watch_tree_list:
        cur_tree = watch_tree_list.pop()
        node_name = cur_tree.node_name
        logger.debug("Update status of node: %s.", node_name)
        # if node_name is "", it is the root node, which is not in normal_node_map
        if not node_name:
            continue
        sub_count = graph.normal_node_map.get(node_name).subnode_count
        # if the children_count of WatchNodeTree is less than the corresponding subnode_count in the graph,
        # its watch_status must be partial_watch
        if cur_tree.get_children_count() < sub_count:
            continue
        is_all_chosen = True
        for _, sub_tree in cur_tree.get_children():
            if sub_tree.watch_status != WatchNodeTree.TOTAL_WATCH:
                is_all_chosen = False
                break
        if is_all_chosen:
            cur_tree.watch_status = WatchNodeTree.TOTAL_WATCH


def _merge_nodes(leaf_nodes, graph):
    """Merge nodes in one graph."""
    watch_node_tree = WatchNodeTree()
    for node in leaf_nodes:
        watch_node_tree.add_node(node.name, node.type, node.full_name)
    _update_watch_status(watch_node_tree, graph)
    out_nodes = []
    _convert_tree_to_node_list(watch_node_tree, out_nodes)
    logger.debug("out_nodes: %s", out_nodes)
    return out_nodes
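

# Merging example (illustrative, with a hypothetical scope name): if every leaf node under a scope
# such as "network/conv1" is watched, _update_watch_status promotes the scope to TOTAL_WATCH and
# _convert_tree_to_node_list emits the single scope node instead of each leaf, keeping the
# recommended watchpoint's node list small.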


def _add_graph_name(nodes, graph_stream):
    """Add graph_name in node.name."""
    if len(graph_stream.graph) > 1:
        return nodes

    graph_name = graph_stream.graph_names[0]
    output_nodes = []
    for node in nodes:
        node_basic_info = graph_stream.construct_node_basic_info(
            full_name=node.full_name, graph_name=graph_name, node_name=node.name, node_type=node.type)
        output_nodes.append(node_basic_info)
    return output_nodes


def _sigmoid(value):
    """Calculate the sigmoid of value."""
    return 1.0 / (1.0 + math.exp(-value))
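

# Sanity check: _sigmoid(0) == 0.5, and _sigmoid(16.2) is within roughly 1e-7 of 1.0, so the
# sigmoid range used below covers effectively all of (0, 1) except a tiny saturation margin.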


def _get_recommend_activation_params(condition, activation_func):
    """Get recommended params for the tanh, sigmoid and relu activation functions."""
    params = []
    if activation_func == ActivationFuncEnum.TANH.value:
        # The recommended params for Tanh:
        # the percentage of values in the range (tanh(-8.8), tanh(8.8)) is lower than 0.1%
        params = [
            _ConditionParameterValue(
                parameter=condition.get_parameter_definition("range_percentage_lt"),
                value=0.1
            ),
            _ConditionParameterValue(
                parameter=condition.get_parameter_definition("range_start_inclusive"),
                value=math.tanh(-8.8)
            ),
            _ConditionParameterValue(
                parameter=condition.get_parameter_definition("range_end_inclusive"),
                value=math.tanh(8.8)
            )]
    if activation_func == ActivationFuncEnum.SIGMOID.value:
        # The recommended params for Sigmoid:
        # the percentage of values in the range (sigmoid(-16.2), sigmoid(16.2)) is lower than 0.1%
        params = [
            _ConditionParameterValue(
                parameter=condition.get_parameter_definition("range_percentage_lt"),
                value=0.1
            ),
            _ConditionParameterValue(
                parameter=condition.get_parameter_definition("range_start_inclusive"),
                value=_sigmoid(-16.2)
            ),
            _ConditionParameterValue(
                parameter=condition.get_parameter_definition("range_end_inclusive"),
                value=_sigmoid(16.2)
            )]
    if activation_func == ActivationFuncEnum.RELU.value:
        # The recommended params for ReLU:
        # the percentage of values in the range (-1, 0) is greater than 99.9%
        params = [
            _ConditionParameterValue(
                parameter=condition.get_parameter_definition("range_percentage_gt"),
                value=99.9
            ),
            _ConditionParameterValue(
                parameter=condition.get_parameter_definition("range_start_inclusive"),
                value=-1
            ),
            _ConditionParameterValue(
                parameter=condition.get_parameter_definition("range_end_inclusive"),
                value=0
            )]
    return params
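

# End-to-end sketch of how the activation params are consumed (illustrative only; the real
# condition object comes from ConditionMgr, and the watchpoint name below is arbitrary):
#
#   condition = condition_mgr.get_condition(condition_id=ConditionIdEnum.ACTIVATION_RANGE.value)
#   params = _get_recommend_activation_params(condition, ActivationFuncEnum.TANH.value)
#   watch_point = _WatchPointData(
#       watch_condition={"condition": condition.id, "params": params},
#       watch_nodes=[],
#       name='example_tanh_activation_range_watchpoint'
#   )
#   watch_point.get_watch_condition_dict()  # -> {"id": ..., "params": [{"name": ..., "value": ...}, ...]}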