Browse Source

add multi-params feature

Because some parameters require other parameters to be set, this commit adds `param_type` and `required_params` to support that feature.
tags/v1.1.0
jiangshuqiang 5 years ago
parent
commit
33484f5424
3 changed files with 157 additions and 34 deletions
  1. +31
    -3
      mindinsight/conditionmgr/condition.py
  2. +123
    -30
      mindinsight/conditionmgr/condition_list.py
  3. +3
    -1
      mindinsight/conditionmgr/conditionmgr.py

+ 31
- 3
mindinsight/conditionmgr/condition.py View File

@@ -18,6 +18,8 @@ Management of all conditions.
This module is used to register all conditions, as well as their parameters. This module is used to register all conditions, as well as their parameters.
This module also provide the available conditions to condition_collections api. This module also provide the available conditions to condition_collections api.
""" """
import math

from enum import Enum from enum import Enum
from mindinsight.conditionmgr.log import logger from mindinsight.conditionmgr.log import logger


@@ -54,6 +56,8 @@ class ConditionIdEnum(Enum):
TENSOR_CHANGE_TOO_LARGE = "tensor_change_too_large" TENSOR_CHANGE_TOO_LARGE = "tensor_change_too_large"
TENSOR_CHANGE_TOO_SMALL = "tensor_change_too_small" TENSOR_CHANGE_TOO_SMALL = "tensor_change_too_small"
TENSOR_NOT_CHANGED = "tensor_not_changed" TENSOR_NOT_CHANGED = "tensor_not_changed"
ACTIVATION_RANGE = "activation_range"
TENSOR_RANGE = "tensor_range"




class OptimizePhaseEnum(Enum): class OptimizePhaseEnum(Enum):
@@ -85,6 +89,12 @@ class TargetTypeEnum(Enum):
GRADIENT = 'gradient' GRADIENT = 'gradient'




class ParamTypeEnum(Enum):
"""Param types."""
CHECK_PARAM = "CHECK_PARAM"
SUPPORT_PARAM = "SUPPORT_PARAM"


class ConditionContext: class ConditionContext:
""" """
The class for condition context. The class for condition context.
@@ -126,16 +136,20 @@ class ConditionParameter:
support_disable (bool): whether the param support no assignment. support_disable (bool): whether the param support no assignment.
default_value (float): default value. default_value (float): default value.
visible_on_ui (bool): whether the param visible on ui. visible_on_ui (bool): whether the param visible on ui.
param_type (ParamTypeEnum): parameters type.
required_params (list): the list of required parameters.
""" """


def __init__(self, name, value_type: ValueTypeEnum, valid_test_func=None, support_disable=True, default_value=None, def __init__(self, name, value_type: ValueTypeEnum, valid_test_func=None, support_disable=True, default_value=None,
visible_on_ui=True):
visible_on_ui=True, param_type=ParamTypeEnum.CHECK_PARAM, required_params=None):
self._name = name self._name = name
self._type = value_type self._type = value_type
self._valid_test_func = valid_test_func self._valid_test_func = valid_test_func
self._support_disable = support_disable self._support_disable = support_disable
self._default_value = default_value self._default_value = default_value
self._visible_on_ui = visible_on_ui self._visible_on_ui = visible_on_ui
self._param_type = param_type.value
self._required_params = required_params


@property @property
def name(self): def name(self):
@@ -162,6 +176,16 @@ class ConditionParameter:
"""Get visible_on_ui of parameter.""" """Get visible_on_ui of parameter."""
return self._visible_on_ui return self._visible_on_ui


@property
def param_type(self):
"""Get param_type of parameter."""
return self._param_type

@property
def required_params(self):
"""Get required_param of parameter."""
return self._required_params

def is_valid(self, value): def is_valid(self, value):
"""Check is the parameter valid.""" """Check is the parameter valid."""
if self._valid_test_func is None: if self._valid_test_func is None:
@@ -174,7 +198,7 @@ class Condition:
The class for parameters of conditions. The class for parameters of conditions.


Args: Args:
condition_id (str): condition id.
condition_id (ConditionIdEnum): condition id.
abbr (str): the abbreviation of condition id. abbr (str): the abbreviation of condition id.
optimize_phase (OptimizePhaseEnum): optimize phase. optimize_phase (OptimizePhaseEnum): optimize phase.
parameters (List[ConditionParameter]): parameters. parameters (List[ConditionParameter]): parameters.
@@ -185,7 +209,7 @@ class Condition:
""" """
def __init__(self, condition_id, abbr, optimize_phase, parameters, supported_target_type, supported_platforms, def __init__(self, condition_id, abbr, optimize_phase, parameters, supported_target_type, supported_platforms,
minimum_debugger_capability, availability_test_func=None): minimum_debugger_capability, availability_test_func=None):
self.id = condition_id
self.id = condition_id.value
self._abbr = abbr self._abbr = abbr
self.optimize_phase = optimize_phase self.optimize_phase = optimize_phase
self._parameters = { self._parameters = {
@@ -258,3 +282,7 @@ def check_abs_param_range(value):
if 0 <= value < float("inf"): if 0 <= value < float("inf"):
return True return True
return False return False


def check_not_nan(value):
return not math.isnan(value)

+ 123
- 30
mindinsight/conditionmgr/condition_list.py View File

@@ -23,15 +23,18 @@ from mindinsight.conditionmgr.condition import ConditionParameter
from mindinsight.conditionmgr.condition import ValueTypeEnum from mindinsight.conditionmgr.condition import ValueTypeEnum
from mindinsight.conditionmgr.condition import TargetTypeEnum from mindinsight.conditionmgr.condition import TargetTypeEnum
from mindinsight.conditionmgr.condition import PlatformEnum from mindinsight.conditionmgr.condition import PlatformEnum
from mindinsight.conditionmgr.condition import ParamTypeEnum
from mindinsight.conditionmgr.condition import ConditionIdEnum
from mindinsight.conditionmgr.condition import check_initialization_available from mindinsight.conditionmgr.condition import check_initialization_available
from mindinsight.conditionmgr.condition import check_normal_param_range from mindinsight.conditionmgr.condition import check_normal_param_range
from mindinsight.conditionmgr.condition import check_percentage_param_range from mindinsight.conditionmgr.condition import check_percentage_param_range
from mindinsight.conditionmgr.condition import check_abs_param_range from mindinsight.conditionmgr.condition import check_abs_param_range
from mindinsight.conditionmgr.condition import check_not_nan




CONDITION_LIST = [ CONDITION_LIST = [
Condition( Condition(
condition_id="weight_initialization",
condition_id=ConditionIdEnum.WEIGHT_INITIALIZATION,
abbr="WI", abbr="WI",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_initialization # Send this condition to MindSpore will use WatchCondition.Condition.tensor_initialization
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -59,7 +62,7 @@ CONDITION_LIST = [
availability_test_func=check_initialization_available availability_test_func=check_initialization_available
), ),
Condition( Condition(
condition_id="weight_overflow",
condition_id=ConditionIdEnum.WEIGHT_OVERFLOW,
abbr="WO", abbr="WO",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_general_overflow # Send this condition to MindSpore will use WatchCondition.Condition.tensor_general_overflow
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -69,7 +72,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="weight_too_large",
condition_id=ConditionIdEnum.WEIGHT_TOO_LARGE,
abbr="WL", abbr="WL",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_large # Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_large
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -100,7 +103,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="weight_too_small",
condition_id=ConditionIdEnum.WEIGHT_TOO_SMALL,
abbr="WS", abbr="WS",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_small # Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_small
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -131,7 +134,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="gradient_vanishing",
condition_id=ConditionIdEnum.GRADIENT_VANISHING,
abbr="GV", abbr="GV",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_small # Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_small
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -162,7 +165,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="gradient_too_large",
condition_id=ConditionIdEnum.GRADIENT_TOO_LARGE,
abbr="GL", abbr="GL",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_large # Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_large
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -193,7 +196,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="gradient_exploding",
condition_id=ConditionIdEnum.GRADIENT_EXPLODING,
abbr="GE", abbr="GE",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_general_overflow # Send this condition to MindSpore will use WatchCondition.Condition.tensor_general_overflow
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -203,7 +206,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="tensor_overflow",
condition_id=ConditionIdEnum.TENSOR_OVERFLOW,
abbr="TO", abbr="TO",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_general_overflow # Send this condition to MindSpore will use WatchCondition.Condition.tensor_general_overflow
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -213,7 +216,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="operator_overflow",
condition_id=ConditionIdEnum.OPERATOR_OVERFLOW,
abbr="OO", abbr="OO",
# Send this condition to MindSpore will use WatchCondition.Condition.overflow # Send this condition to MindSpore will use WatchCondition.Condition.overflow
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -223,7 +226,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="nan",
condition_id=ConditionIdEnum.NAN,
abbr="NAN", abbr="NAN",
# Send this condition to MindSpore will use WatchCondition.Condition.nan # Send this condition to MindSpore will use WatchCondition.Condition.nan
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -233,7 +236,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="overflow",
condition_id=ConditionIdEnum.OVERFLOW_ASCEND_CHIP,
abbr="OVERFLOW", abbr="OVERFLOW",
# Send this condition to MindSpore will use WatchCondition.Condition.overflow # Send this condition to MindSpore will use WatchCondition.Condition.overflow
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -243,7 +246,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="inf",
condition_id=ConditionIdEnum.INF,
abbr="INF", abbr="INF",
# Send this condition to MindSpore will use WatchCondition.Condition.inf # Send this condition to MindSpore will use WatchCondition.Condition.inf
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -253,7 +256,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="max_gt",
condition_id=ConditionIdEnum.MAX_GT,
abbr="MAX>", abbr="MAX>",
# Send this condition to MindSpore will use WatchCondition.Condition.max_gt # Send this condition to MindSpore will use WatchCondition.Condition.max_gt
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -269,7 +272,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="max_lt",
condition_id=ConditionIdEnum.MAX_LT,
abbr="MAX<", abbr="MAX<",
# Send this condition to MindSpore will use WatchCondition.Condition.max_lt # Send this condition to MindSpore will use WatchCondition.Condition.max_lt
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -285,7 +288,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="min_gt",
condition_id=ConditionIdEnum.MIN_GT,
abbr="MIN>", abbr="MIN>",
# Send this condition to MindSpore will use WatchCondition.Condition.min_gt # Send this condition to MindSpore will use WatchCondition.Condition.min_gt
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -301,7 +304,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="min_lt",
condition_id=ConditionIdEnum.MIN_LT,
abbr="MIN<", abbr="MIN<",
# Send this condition to MindSpore will use WatchCondition.Condition.min_lt # Send this condition to MindSpore will use WatchCondition.Condition.min_lt
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -317,7 +320,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="max_min_gt",
condition_id=ConditionIdEnum.MAX_MIN_GT,
abbr="MAX-MIN>", abbr="MAX-MIN>",
# Send this condition to MindSpore will use WatchCondition.Condition.max_min_gt # Send this condition to MindSpore will use WatchCondition.Condition.max_min_gt
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -333,7 +336,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="max_min_lt",
condition_id=ConditionIdEnum.MAX_MIN_LT,
abbr="MAX-Min<", abbr="MAX-Min<",
# Send this condition to MindSpore will use WatchCondition.Condition.max_min_lt # Send this condition to MindSpore will use WatchCondition.Condition.max_min_lt
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -349,7 +352,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="mean_gt",
condition_id=ConditionIdEnum.MEAN_GT,
abbr="MEAN>", abbr="MEAN>",
# Send this condition to MindSpore will use WatchCondition.Condition.mean_gt # Send this condition to MindSpore will use WatchCondition.Condition.mean_gt
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -365,7 +368,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="mean_lt",
condition_id=ConditionIdEnum.MEAN_LT,
abbr="MEAN<", abbr="MEAN<",
# Send this condition to MindSpore will use WatchCondition.Condition.mean_lt # Send this condition to MindSpore will use WatchCondition.Condition.mean_lt
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -381,7 +384,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 0) minimum_debugger_capability=(1, 0)
), ),
Condition( Condition(
condition_id="tensor_initialization",
condition_id=ConditionIdEnum.TENSOR_INITIALIZATION,
abbr="TI", abbr="TI",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_initialization # Send this condition to MindSpore will use WatchCondition.Condition.tensor_initialization
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -409,7 +412,7 @@ CONDITION_LIST = [
availability_test_func=check_initialization_available availability_test_func=check_initialization_available
), ),
Condition( Condition(
condition_id="tensor_too_large",
condition_id=ConditionIdEnum.TENSOR_TOO_LARGE,
abbr="TL", abbr="TL",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_large # Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_large
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -440,7 +443,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="tensor_too_small",
condition_id=ConditionIdEnum.TENSOR_TOO_SMALL,
abbr="TS", abbr="TS",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_small # Send this condition to MindSpore will use WatchCondition.Condition.tensor_too_small
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -471,7 +474,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="tensor_all_zero",
condition_id=ConditionIdEnum.TENSOR_ALL_ZERO,
abbr="TZ", abbr="TZ",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_all_zero # Send this condition to MindSpore will use WatchCondition.Condition.tensor_all_zero
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -488,7 +491,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="weight_not_changed",
condition_id=ConditionIdEnum.WEIGHT_NOT_CHANGED,
abbr="WNC", abbr="WNC",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_not_changed # Send this condition to MindSpore will use WatchCondition.Condition.tensor_not_changed
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -519,7 +522,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="weight_change_too_large",
condition_id=ConditionIdEnum.WEIGHT_CHANGE_TOO_LARGE,
abbr="WCL", abbr="WCL",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_change_too_large # Send this condition to MindSpore will use WatchCondition.Condition.tensor_change_too_large
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -543,7 +546,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="weight_change_too_small",
condition_id=ConditionIdEnum.WEIGHT_CHANGE_TOO_SMALL,
abbr="WCS", abbr="WCS",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_change_too_small # Send this condition to MindSpore will use WatchCondition.Condition.tensor_change_too_small
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -567,7 +570,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="tensor_change_too_large",
condition_id=ConditionIdEnum.TENSOR_CHANGE_TOO_LARGE,
abbr="TCL", abbr="TCL",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_change_too_large # Send this condition to MindSpore will use WatchCondition.Condition.tensor_change_too_large
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -591,7 +594,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="tensor_change_too_small",
condition_id=ConditionIdEnum.TENSOR_CHANGE_TOO_SMALL,
abbr="TCS", abbr="TCS",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_change_too_small # Send this condition to MindSpore will use WatchCondition.Condition.tensor_change_too_small
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -615,7 +618,7 @@ CONDITION_LIST = [
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
), ),
Condition( Condition(
condition_id="tensor_not_changed",
condition_id=ConditionIdEnum.TENSOR_NOT_CHANGED,
abbr="TNC", abbr="TNC",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_not_changed # Send this condition to MindSpore will use WatchCondition.Condition.tensor_not_changed
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK, optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
@@ -644,5 +647,95 @@ CONDITION_LIST = [
supported_target_type=TargetTypeEnum.TENSOR, supported_target_type=TargetTypeEnum.TENSOR,
supported_platforms=(PlatformEnum.ASCEND, PlatformEnum.GPU), supported_platforms=(PlatformEnum.ASCEND, PlatformEnum.GPU),
minimum_debugger_capability=(1, 1) minimum_debugger_capability=(1, 1)
),
Condition(
condition_id=ConditionIdEnum.ACTIVATION_RANGE,
abbr="AR",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_not_changed
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
parameters=[
ConditionParameter(
name="range_start_inclusive",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_not_nan,
param_type=ParamTypeEnum.SUPPORT_PARAM
),
ConditionParameter(
name="range_end_inclusive",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_not_nan,
param_type=ParamTypeEnum.SUPPORT_PARAM
),
ConditionParameter(
name="range_percentage_lt",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_percentage_param_range,
required_params=["range_start_inclusive", "range_end_inclusive"]
),
ConditionParameter(
name="range_percentage_gt",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_percentage_param_range,
required_params=["range_start_inclusive", "range_end_inclusive"]
),
ConditionParameter(
name="max_min_lt",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_normal_param_range
),
ConditionParameter(
name="max_min_gt",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_normal_param_range
)
],
supported_target_type=TargetTypeEnum.ACTIVATION,
supported_platforms=(PlatformEnum.ASCEND, PlatformEnum.GPU),
minimum_debugger_capability=(1, 1)
),
Condition(
condition_id=ConditionIdEnum.TENSOR_RANGE,
abbr="TR",
# Send this condition to MindSpore will use WatchCondition.Condition.tensor_not_changed
optimize_phase=OptimizePhaseEnum.TENSOR_CHECK,
parameters=[
ConditionParameter(
name="range_start_inclusive",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_not_nan,
param_type=ParamTypeEnum.SUPPORT_PARAM
),
ConditionParameter(
name="range_end_inclusive",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_not_nan,
param_type=ParamTypeEnum.SUPPORT_PARAM
),
ConditionParameter(
name="range_percentage_lt",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_percentage_param_range,
required_params=["range_start_inclusive", "range_end_inclusive"]
),
ConditionParameter(
name="range_percentage_gt",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_percentage_param_range,
required_params=["range_start_inclusive", "range_end_inclusive"]
),
ConditionParameter(
name="max_min_lt",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_normal_param_range
),
ConditionParameter(
name="max_min_gt",
value_type=ValueTypeEnum.FLOAT64,
valid_test_func=check_normal_param_range
)
],
supported_target_type=TargetTypeEnum.TENSOR,
supported_platforms=(PlatformEnum.ASCEND, PlatformEnum.GPU),
minimum_debugger_capability=(1, 1)
) )
] ]

+ 3
- 1
mindinsight/conditionmgr/conditionmgr.py View File

@@ -114,7 +114,9 @@ class ConditionMgr:
"name": param.name, "name": param.name,
"type": param.type.name, "type": param.type.name,
"support_disable": param.support_disable, "support_disable": param.support_disable,
"default_value": param.default_value
"default_value": param.default_value,
"param_type": param.param_type,
"required_params": param.required_params
}) })
collections[condition.supported_target_type.value].append({ collections[condition.supported_target_type.value].append({
"id": condition.id, "id": condition.id,


Loading…
Cancel
Save