
!973 Fix search results in multigraph when graph_scope is included in keywords

From: @maning202007
Reviewed-by: @wangyue01
Signed-off-by:
tags/v1.1.0
mindspore-ci-bot committed on Gitee 5 years ago
commit 0ee0a58eab
10 changed files with 26 additions and 82 deletions
  1. mindinsight/debugger/conditionmgr/recommender.py (+2, -14)
  2. mindinsight/debugger/debugger_grpc_server.py (+4, -4)
  3. mindinsight/debugger/stream_cache/node_type_identifier.py (+5, -5)
  4. mindinsight/debugger/stream_handler/graph_handler.py (+5, -50)
  5. tests/st/func/debugger/expect_results/restful_results/search_gradient_multi_graph.json (+1, -0)
  6. tests/st/func/debugger/test_restful_api.py (+2, -1)
  7. tests/ut/debugger/expected_results/graph/search_nodes_by_type_0.json (+1, -1)
  8. tests/ut/debugger/expected_results/graph/search_nodes_by_type_1.json (+1, -1)
  9. tests/ut/debugger/stream_cache/test_node_type_identifier.py (+3, -3)
  10. tests/ut/debugger/stream_handler/test_graph_handler.py (+2, -3)

mindinsight/debugger/conditionmgr/recommender.py (+2, -14)

@@ -334,20 +334,8 @@ def _get_basic_node_info_by_node_category(node_category, graph_stream, activation_func
     pattern = {'node_category': node_category}
     if activation_func:
         pattern['condition'] = {'activation_func': activation_func}
-    all_graph_nodes = graph_stream.get_searched_nodes(pattern)
-    basic_info_nodes = []
-    for graph_name, nodes in all_graph_nodes.items():
-        if len(all_graph_nodes) == 1:
-            logger.debug("This is a single graph")
-            graph_name = ""
-        for node in nodes:
-            if graph_name == "":
-                basic_node_info = NodeBasicInfo(name=node.name, full_name=node.full_name, type=node.type)
-            else:
-                basic_node_info = graph_stream.construct_node_basic_info(
-                    full_name=node.full_name, graph_name=graph_name, node_name=node.name, node_type=node.type)
-            basic_info_nodes.append(basic_node_info)
-    return basic_info_nodes
+    all_graph_nodes = graph_stream.search_in_graph(pattern)
+    return all_graph_nodes


 def _merge_nodes(leaf_nodes, graph):
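
For orientation, a hedged sketch of how the slimmed-down helper can now be consumed. It assumes, as the debugger_grpc_server.py hunk below suggests, that search_in_graph returns a flat iterable of node-info objects exposing name, full_name and type instead of the old dict keyed by graph name; the 'parameter' category string is an illustrative stand-in for TargetTypeEnum.PARAMETER.value, not a value confirmed by this diff.

# Illustrative only; graph_stream is assumed to be an initialized graph stream handler.
def parameter_tensor_names(graph_stream):
    """Return '<full_name>:0' tensor names for every parameter node found."""
    nodes = graph_stream.search_in_graph({'node_category': 'parameter'})  # assumed flat list
    return [node.full_name + ':0' for node in nodes]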


mindinsight/debugger/debugger_grpc_server.py (+4, -4)

@@ -432,12 +432,12 @@ class DebuggerGrpcServer(grpc_server_base.EventListenerServicer):

     def _record_parameter_names(self):
         """Record parameter full names in tensor handler."""
-        parameter_nodes = self._cache_store.get_stream_handler(Streams.GRAPH).get_searched_nodes(
+        parameter_nodes = self._cache_store.get_stream_handler(Streams.GRAPH).search_in_graph(
             pattern={'node_category': TargetTypeEnum.PARAMETER.value})
         tensor_stream = self._cache_store.get_stream_handler(Streams.TENSOR)
-        for nodes in parameter_nodes.values():
-            tensor_names = [node.full_name + ':0' for node in nodes]
-            tensor_stream.record_parameter_names(tensor_names)
+        for node in parameter_nodes:
+            tensor_name = [node.full_name + ':0']
+            tensor_stream.record_parameter_names(tensor_name)

     @debugger_wrap
     def SendTensors(self, request_iterator, context):


mindinsight/debugger/stream_cache/node_type_identifier.py (+5, -5)

@@ -90,11 +90,11 @@ def is_weight_node(node):
         bool, if the node is weight type.
     """
     if node.type == NodeTypeEnum.PARAMETER.value:
-        node_name = node.name.lower()
+        full_name = node.full_name.lower()
         weight_flag = False
-        if node_name.endswith('.weight') or node_name.endswith('.bias'):
+        if full_name.endswith('.weight') or full_name.endswith('.bias'):
             weight_flag = True
-        if weight_flag and 'optimizer-' not in node_name and not node_name.startswith('gradients/'):
+        if weight_flag and 'optimizer-' not in full_name and not full_name.startswith('gradients/'):
             return True
     return False

@@ -136,8 +136,8 @@ def is_gradient_node(node):
     Returns:
         bool, if the node is gradient type.
     """
-    node_name = node.name.lower()
-    if node_name.startswith('gradients/') and \
+    full_name = node.full_name.lower()
+    if full_name.startswith('gradients/') and \
             node.type not in [NodeTypeEnum.PARAMETER.value, NodeTypeEnum.CONST.value]:
         return True
     return False
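
Read on their own, the two predicates now apply the following rules to the lower-cased full_name. The sketch below restates that logic outside the module, with plain strings standing in for the NodeTypeEnum values; it is illustrative, not the shipped code.

PARAMETER = 'Parameter'  # assumed stand-in for NodeTypeEnum.PARAMETER.value
CONST = 'Const'          # assumed stand-in for NodeTypeEnum.CONST.value

def looks_like_weight(node_type, full_name):
    """A Parameter whose full name ends with .weight/.bias, excluding optimizer and gradient nodes."""
    name = full_name.lower()
    if node_type != PARAMETER:
        return False
    is_weight = name.endswith('.weight') or name.endswith('.bias')
    return is_weight and 'optimizer-' not in name and not name.startswith('gradients/')

def looks_like_gradient(node_type, full_name):
    """Any non-Parameter, non-Const node whose full name starts with gradients/."""
    return full_name.lower().startswith('gradients/') and node_type not in (PARAMETER, CONST)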


mindinsight/debugger/stream_handler/graph_handler.py (+5, -50)

@@ -257,58 +257,13 @@ class GraphHandler(StreamHandlerBase):
             dict, the searched node.
         """
         graph_name = pattern.pop('graph_name', None)
-        search_nodes = self.get_searched_nodes(pattern, graph_name)
+        search_nodes = self.search_in_graph(pattern, graph_name)
         # construct to search tree
-        if not self._has_graph_scope(graph_name):
-            for graph_name, searched_node_list in search_nodes.items():
-                graph = self._get_graph(graph_name=graph_name)
-                format_nodes = graph.get_nodes(searched_node_list)
-                return {'nodes': format_nodes}
-        # deal with graph_name is None
-        res = []
-        for graph_name, graph in self._graph.items():
-            format_nodes = graph.get_nodes(search_nodes.get(graph_name, []))
-            if not format_nodes:
-                continue
-            self._add_graph_scope_for_nodes(format_nodes, graph_name)
-            search_graph = {
-                'name': graph_name,
-                'type': 'name_scope',
-                'nodes': format_nodes
-            }
-            res.append(search_graph)
-        return {'nodes': res}
-
-    def get_searched_node_list(self, pattern, graph_name):
-        """Get searched node list in single graph."""
-        searched_nodes = self.get_searched_nodes(pattern, graph_name)
-        return searched_nodes.get(graph_name, [])
-
-    def get_searched_nodes(self, pattern, graph_name=None):
-        """
-        Search nodes by given pattern.
-
-        Args:
-            pattern (dict): Filter condition.
-
-                - name (str): The name pattern.
-                - node_category (str): The node_category. Default: None
-                - condition (dict): The additional filter condition.
-            graph_name (str): The graph name. If not given, search in all sub graphs. Default: None.
-
-        Returns:
-            dict, the searched nodes. The format is dict of <graph_name, list[Node]>.
-        """
-        if not graph_name:
-            graph_names = self.graph_names
-        else:
-            graph_names = [graph_name]
-        search_nodes = {}
-        for sub_graph_name in graph_names:
-            search_nodes[sub_graph_name] = self._search_in_single_graph(pattern, sub_graph_name)
-        return search_nodes
+        graph = self._get_graph(graph_name=graph_name)
+        format_nodes = graph.get_nodes(search_nodes)
+        return {'nodes': format_nodes}

-    def _search_in_single_graph(self, pattern, graph_name=None):
+    def search_in_graph(self, pattern, graph_name=None):
         """
         Search nodes by given pattern.
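
In short, search_in_graph(pattern, graph_name=None) replaces both get_searched_nodes and _search_in_single_graph, and search_nodes only formats its result into the nested {'name', 'type', 'nodes'} tree shown in the expected-result JSON files below. A minimal call sketch, assuming an already-initialized GraphHandler:

def search_and_format(graph_handler, node_category, graph_name=None):
    """Sketch: run a category search and return the UI-facing node tree."""
    pattern = {'node_category': node_category}
    if graph_name:
        pattern['graph_name'] = graph_name  # omit to search every sub-graph
    return graph_handler.search_nodes(pattern)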



tests/st/func/debugger/expect_results/restful_results/search_gradient_multi_graph.json (+1, -0)

File diff suppressed because it is too large.


tests/st/func/debugger/test_restful_api.py (+2, -1)

@@ -559,7 +559,8 @@ class TestMultiGraphDebugger:
     @pytest.mark.platform_x86_ascend_training
     @pytest.mark.parametrize("filter_condition, expect_file", [
         ({'name': '', 'node_category': 'weight'}, 'search_weight_multi_graph.json'),
-        ({'node_category': 'activation'}, 'search_activation_multi_graph.json')
+        ({'node_category': 'activation'}, 'search_activation_multi_graph.json'),
+        ({'node_category': 'gradient'}, 'search_gradient_multi_graph.json')
     ])
     def test_search_by_category_with_multi_graph(self, app_client, filter_condition, expect_file):
         """Test search by category request."""


tests/ut/debugger/expected_results/graph/search_nodes_by_type_0.json (+1, -1)

@@ -1 +1 @@
{"node_names": ["Default/network-WithLossCell/_backbone-LeNet5/conv2-Conv2d/conv2.weight", "Default/network-WithLossCell/_backbone-LeNet5/conv1-Conv2d/conv1.weight", "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc1.weight", "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc1.bias", "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc2.weight", "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc2.bias", "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc3.weight", "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc3.bias"]}
{"nodes": [{"name": "Default", "type": "name_scope", "nodes": [{"name": "Default/network-WithLossCell", "type": "name_scope", "nodes": [{"name": "Default/network-WithLossCell/_backbone-LeNet5", "type": "name_scope", "nodes": [{"name": "Default/network-WithLossCell/_backbone-LeNet5/conv2-Conv2d", "type": "name_scope", "nodes": [{"name": "Default/network-WithLossCell/_backbone-LeNet5/conv2-Conv2d/conv2.weight", "type": "Parameter", "nodes": []}]}, {"name": "Default/network-WithLossCell/_backbone-LeNet5/conv1-Conv2d", "type": "name_scope", "nodes": [{"name": "Default/network-WithLossCell/_backbone-LeNet5/conv1-Conv2d/conv1.weight", "type": "Parameter", "nodes": []}]}, {"name": "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense", "type": "name_scope", "nodes": [{"name": "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6", "type": "aggregation_scope", "nodes": [{"name": "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc1.weight", "type": "Parameter", "nodes": []}, {"name": "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc1.bias", "type": "Parameter", "nodes": []}, {"name": "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc2.weight", "type": "Parameter", "nodes": []}, {"name": "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc2.bias", "type": "Parameter", "nodes": []}, {"name": "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc3.weight", "type": "Parameter", "nodes": []}, {"name": "Default/network-WithLossCell/_backbone-LeNet5/fc3-Dense/Parameter[6]_6/fc3.bias", "type": "Parameter", "nodes": []}]}]}]}]}]}]}

tests/ut/debugger/expected_results/graph/search_nodes_by_type_1.json (+1, -1)

@@ -1 +1 @@
{"node_names": ["Default/network-WithLossCell/_backbone-LeNet5/relu-ReLU/ReLU-op12", "Default/network-WithLossCell/_backbone-LeNet5/relu-ReLU/ReLU-op15"]}
{"nodes": [{"name": "Default", "type": "name_scope", "nodes": [{"name": "Default/network-WithLossCell", "type": "name_scope", "nodes": [{"name": "Default/network-WithLossCell/_backbone-LeNet5", "type": "name_scope", "nodes": [{"name": "Default/network-WithLossCell/_backbone-LeNet5/relu-ReLU", "type": "name_scope", "nodes": [{"name": "Default/network-WithLossCell/_backbone-LeNet5/relu-ReLU/ReLU-op12", "type": "ReLU", "nodes": []}, {"name": "Default/network-WithLossCell/_backbone-LeNet5/relu-ReLU/ReLU-op15", "type": "ReLU", "nodes": []}]}]}]}]}]}

tests/ut/debugger/stream_cache/test_node_type_identifier.py (+3, -3)

@@ -39,7 +39,7 @@ class TestNodeTypeIdentifier:
         """Test weight node."""
         identifier = NodeTypeIdentifier('weight')
         mock_node = MagicMock(type=node_type)
-        mock_node.name = name
+        mock_node.full_name = name
         res = identifier.is_match(mock_node)
         assert res is result

@@ -52,7 +52,7 @@ class TestNodeTypeIdentifier:
         """Test gradient node."""
         identifier = NodeTypeIdentifier('gradient')
         mock_node = MagicMock(type=node_type)
-        mock_node.name = name
+        mock_node.full_name = name
         res = identifier.is_match(mock_node)
         assert res is result

@@ -67,7 +67,7 @@ class TestNodeTypeIdentifier:
         """Test activate node."""
         identifier = NodeTypeIdentifier('activation')
         mock_node = MagicMock(type=node_type)
-        mock_node.name = name
+        mock_node.full_name = name
        res = identifier.is_match(mock_node, condition)
         assert res is result



tests/ut/debugger/stream_handler/test_graph_handler.py (+2, -3)

@@ -77,10 +77,9 @@ class TestGraphHandler:
     ])
     def test_search_nodes_by_type(self, node_type, condition, result_file):
         """Test search nodes by type."""
-        search_nodes = self.graph_handler.get_searched_node_list(
-            {'node_category': node_type, 'condition': condition}, 'kernel_graph_0')
+        result = self.graph_handler.search_nodes(
+            {'node_category': node_type, 'condition': condition, 'graph_name': 'kernel_graph_0'})
         file_path = os.path.join(self.graph_results_dir, result_file)
-        result = {'node_names': [node.name for node in search_nodes]}
         compare_result_with_file(result, file_path)

     @pytest.mark.parametrize("node_name, expect_type", [
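
The removed assertion built a flat node_names list; if that view is still wanted for debugging, a small helper can flatten the new nested result. A sketch assuming the {'nodes': [...]} shape shown in the expected-result JSON above:

def flatten_node_names(search_result):
    """Collect the names of leaf entries from a nested search_nodes result."""
    names = []
    stack = list(search_result.get('nodes', []))
    while stack:
        node = stack.pop()
        children = node.get('nodes', [])
        if children:
            stack.extend(children)
        else:
            names.append(node['name'])
    return names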

