@@ -34,6 +34,17 @@ def _generate_schema_err_msg(err_msg, *args):
     return {"invalid": err_msg}
 
 
+def _generate_err_msg_for_nested_keys(err_msg, *args):
+    """Organize an error message into a dict nested by the given key names."""
+    err_dict = {}
+    for name in args[::-1]:
+        if not err_dict:
+            err_dict.update({name: err_msg})
+        else:
+            err_dict = {name: err_dict}
+    return err_dict
+
+
 def include_integer(low, high):
     """Check if the range [low, high) includes integer."""
     def _in_range(num, low, high):
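A quick standalone check of the helper's nesting order, not part of the diff: the innermost key is the last positional argument, since the loop walks `args` in reverse.

```python
def _generate_err_msg_for_nested_keys(err_msg, *args):
    """Nest err_msg under the given keys, outermost key first."""
    err_dict = {}
    for name in args[::-1]:  # walk keys from innermost to outermost
        if not err_dict:
            err_dict.update({name: err_msg})
        else:
            err_dict = {name: err_dict}
    return err_dict

print(_generate_err_msg_for_nested_keys(
    "The value(s) should be positive.", "learning_rate", "bounds"))
# -> {'learning_rate': {'bounds': 'The value(s) should be positive.'}}
```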
@@ -91,11 +102,17 @@ class ParameterSchema(Schema):
             raise ValidationError("The upper bound must be greater than lower bound. "
                                   "The range is [lower_bound, upper_bound).")
 
+    @validates("choice")
+    def check_choice(self, choice):
+        """Check if choice is valid."""
+        if not choice:
+            raise ValidationError("It is empty, please fill in at least one value.")
+
     @validates("type")
     def check_type(self, type_in):
         """Check if type is valid."""
         if type_in not in HyperParamType.list_members():
-            raise ValidationError("The type should be in %s." % HyperParamType.list_members())
+            raise ValidationError("It should be in %s." % HyperParamType.list_members())
 
     @validates("source")
     def check_source(self, source):
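For background on how these validators surface errors, a minimal marshmallow sketch (hypothetical `DemoSchema`, not part of this change): a `ValidationError` raised inside an `@validates` method lands in the errors dict under that field's key, as a list of messages.

```python
from marshmallow import Schema, fields, validates, ValidationError

class DemoSchema(Schema):
    """Hypothetical schema, only to show the error shape."""
    type = fields.Str()

    @validates("type")
    def check_type(self, type_in):
        if type_in not in ['int', 'float']:
            raise ValidationError("It should be in ['int', 'float'].")

print(DemoSchema().validate({"type": "str"}))
# -> {'type': ["It should be in ['int', 'float']."]}
```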
@@ -170,6 +187,38 @@ class OptimizerConfig(Schema):
     target = fields.Dict(required=True, error_messages=dict_err_msg)
     parameters = fields.Dict(required=True, error_messages=dict_err_msg)
 
+    def _check_tunable_system_parameters(self, name, value):
+        """Check tunable system parameters."""
+        bound = value.get(HyperParamKey.BOUND.value)
+        choice = value.get(HyperParamKey.CHOICE.value)
+        param_type = value.get(HyperParamKey.TYPE.value)
+
+        err_msg = "The value(s) should be positive."
+        if bound is not None and bound[0] <= 0:
+            raise ValidationError(_generate_err_msg_for_nested_keys(err_msg, name, HyperParamKey.BOUND.value))
+        if choice is not None and min(choice) <= 0:
+            raise ValidationError(_generate_err_msg_for_nested_keys(err_msg, name, HyperParamKey.CHOICE.value))
+
+        if name == TunableSystemDefinedParams.LEARNING_RATE.value:
+            if bound is not None and bound[1] > 1:
+                err_msg = "The upper bound should be less than or equal to 1."
+                raise ValidationError(_generate_err_msg_for_nested_keys(err_msg, name, HyperParamKey.BOUND.value))
+            if choice is not None and max(choice) >= 1:
+                err_msg = "The value(s) should be float numbers less than 1."
+                raise ValidationError(_generate_err_msg_for_nested_keys(err_msg, name, HyperParamKey.CHOICE.value))
+            if param_type == HyperParamType.INT.value:
+                err_msg = "The value(s) should be float numbers, please configure its type as %s." % HyperParamType.FLOAT.value
+                raise ValidationError(_generate_err_msg_for_nested_keys(err_msg, name, HyperParamKey.TYPE.value))
+        else:
+            if choice is not None and list(filter(lambda x: not isinstance(x, int), choice)):
+                # the choice contains value(s) that are not integers
+                err_msg = "The value(s) should be integers."
+                raise ValidationError(_generate_err_msg_for_nested_keys(err_msg, name, HyperParamKey.CHOICE.value))
+            if bound is not None and param_type != HyperParamType.INT.value:
+                # if bound is configured, the type must be configured as int
+                err_msg = "The value(s) should be integers, please configure its type as %r." % HyperParamType.INT.value
+                raise ValidationError(_generate_err_msg_for_nested_keys(err_msg, name, HyperParamKey.TYPE.value))
+
     @validates("tuner")
     def check_tuner(self, data):
         """Check tuner."""
@@ -185,10 +234,16 @@ class OptimizerConfig(Schema):
             if err:
                 raise ValidationError({name: err})
 
-            if HyperParamKey.SOURCE.value not in value:
+            source = value.get(HyperParamKey.SOURCE.value)
+            if source in [None, HyperParamSource.SYSTEM_DEFINED.value] and \
+                    name in TunableSystemDefinedParams.list_members():
+                self._check_tunable_system_parameters(name, value)
+
+            if source is None:
                 # if params is in system_defined keys, group will be 'system_defined', else will be 'user_defined'.
                 continue
-            source = value.get(HyperParamKey.SOURCE.value)
+
             if source == HyperParamSource.SYSTEM_DEFINED.value and \
                     name not in TunableSystemDefinedParams.list_members():
                 raise ValidationError({
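A hedged sketch of the grouping rule this loop enforces, not part of the diff: `resolve_group` is a hypothetical helper, and the `'user_defined'` literal mirrors the comment above rather than a confirmed enum member.

```python
from mindinsight.optimizer.common.enums import HyperParamSource, TunableSystemDefinedParams

def resolve_group(name, value):
    """Sketch: an explicit 'system_defined' source must name a tunable system
    parameter; a missing source is grouped by the parameter name itself."""
    source = value.get("source")
    members = TunableSystemDefinedParams.list_members()
    if source == HyperParamSource.SYSTEM_DEFINED.value and name not in members:
        raise ValueError("%r is not a system defined parameter." % name)
    if source is None:
        return HyperParamSource.SYSTEM_DEFINED.value if name in members else "user_defined"
    return source
```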
@@ -13,7 +13,9 @@
 # limitations under the License.
 # ============================================================================
 """Utils for params."""
+import math
+
 import numpy as np
 
 from mindinsight.lineagemgr.model import LineageTable, USER_DEFINED_PREFIX, METRIC_PREFIX
 from mindinsight.optimizer.common.enums import HyperParamKey, HyperParamType, HyperParamSource, TargetKey, \
     TargetGoal, TunableSystemDefinedParams, TargetGroup, SystemDefinedTargets
@@ -56,16 +58,21 @@ def match_value_type(array, params_info: dict):
     index = 0
     for _, param_info in params_info.items():
         value = array[index]
-        if HyperParamKey.BOUND.value in param_info:
-            bound = param_info[HyperParamKey.BOUND.value]
+        bound = param_info.get(HyperParamKey.BOUND.value)
+        choice = param_info.get(HyperParamKey.CHOICE.value)
+        if bound is not None:
             value = max(bound[0], array[index])
             value = min(bound[1], value)
-        if HyperParamKey.CHOICE.value in param_info:
-            choices = param_info[HyperParamKey.CHOICE.value]
-            nearest_index = int(np.argmin(np.fabs(np.array(choices) - value)))
-            value = choices[nearest_index]
+        if choice is not None:
+            nearest_index = int(np.argmin(np.fabs(np.array(choice) - value)))
+            value = choice[nearest_index]
         if param_info.get(HyperParamKey.TYPE.value) == HyperParamType.INT.value:
             value = int(value)
+            if bound is not None and value < bound[0]:
+                value = math.ceil(bound[0])
+            elif bound is not None and value >= bound[1]:
+                # e.g. bound[1] is 2.0 -> value 1; bound[1] is 2.1 -> value 2
+                value = math.ceil(bound[1]) - 1
         if HyperParamKey.DECIMAL.value in param_info:
             value = np.around(value, decimals=param_info[HyperParamKey.DECIMAL.value])
         array_new.append(value)
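The int-clamping branch in isolation, not part of the diff: a standalone sketch (`clamp_int` is a hypothetical name) of how an int-cast value is pulled back inside the half-open range [bound[0], bound[1)).

```python
import math

def clamp_int(value, bound):
    """Pull an int-cast value back inside [bound[0], bound[1))."""
    value = int(value)
    if value < bound[0]:
        value = math.ceil(bound[0])
    elif value >= bound[1]:
        # largest integer strictly below bound[1]
        value = math.ceil(bound[1]) - 1
    return value

print(clamp_int(2.0, [1, 2.0]))  # -> 1  (2 falls outside [1, 2.0))
print(clamp_int(3.0, [1, 2.1]))  # -> 2  (largest int below 2.1)
print(clamp_int(0.4, [0.5, 2]))  # -> 1  (ceil of the lower bound)
```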
@@ -15,6 +15,8 @@
 """Test optimizer config schema."""
 from copy import deepcopy
 
+import pytest
+
 from mindinsight.optimizer.common.validator.optimizer_config import OptimizerConfig
 from mindinsight.optimizer.common.enums import TargetGroup, HyperParamSource
@@ -105,7 +107,7 @@ class TestOptimizerConfig:
                 'choice': {
                     0: ['Value(s) should be integer or float.']
                 },
-                'type': ["The type should be in ['int', 'float']."]
+                'type': ["It should be in ['int', 'float']."]
             }
         },
         'target': {
@@ -161,3 +163,98 @@ class TestOptimizerConfig:
         }
         err = OptimizerConfig().validate(config_dict)
         assert expected_err == err
+
+    def test_learning_rate(self):
+        """Test the combination checks for learning_rate."""
+        config_dict = deepcopy(self._config_dict)
+        config_dict['parameters']['learning_rate']['bounds'] = [-0.1, 1]
+        expected_err = {
+            'parameters': {
+                'learning_rate': {
+                    'bounds': 'The value(s) should be positive.'
+                }
+            }
+        }
+        err = OptimizerConfig().validate(config_dict)
+        assert expected_err == err
+
+        config_dict['parameters']['learning_rate']['bounds'] = [0.1, 1.1]
+        expected_err = {
+            'parameters': {
+                'learning_rate': {
+                    'bounds': 'The upper bound should be less than or equal to 1.'
+                }
+            }
+        }
+        err = OptimizerConfig().validate(config_dict)
+        assert expected_err == err
+
+        config_dict['parameters']['learning_rate']['type'] = 'int'
+        expected_err = {
+            'parameters': {
+                'learning_rate': {
+                    'bounds': 'The upper bound should be less than or equal to 1.'
+                }
+            }
+        }
+        err = OptimizerConfig().validate(config_dict)
+        assert expected_err == err
+
+    @pytest.mark.parametrize("param_name", ['batch_size', 'epoch'])
+    def test_batch_size_and_epoch(self, param_name):
+        """Test the combination checks for batch_size and epoch."""
+        config_dict = deepcopy(self._config_dict)
+        config_dict['parameters'] = {}
+        config_dict['parameters'][param_name] = {'choice': [-0.1, 1]}
+        expected_err = {
+            'parameters': {
+                param_name: {
+                    'choice': 'The value(s) should be positive.'
+                }
+            }
+        }
+        err = OptimizerConfig().validate(config_dict)
+        assert expected_err == err
+
+        config_dict['parameters'][param_name] = {'choice': [0.1, 0.2]}
+        expected_err = {
+            'parameters': {
+                param_name: {
+                    'choice': 'The value(s) should be integers.'
+                }
+            }
+        }
+        err = OptimizerConfig().validate(config_dict)
+        assert expected_err == err
+
+        config_dict['parameters'] = {}
+        config_dict['parameters'][param_name] = {
+            'bounds': [1, 22],
+            'type': 'float'
+        }
+        expected_err = {
+            'parameters': {
+                param_name: {
+                    'type': "The value(s) should be integers, please configure its type as 'int'."
+                }
+            }
+        }
+        err = OptimizerConfig().validate(config_dict)
+        assert expected_err == err
+
+        config_dict['parameters'] = {}
+        config_dict['parameters'][param_name] = {
+            'bounds': [0.1, 1.2],
+            'type': 'float'
+        }
+        expected_err = {
+            'parameters': {
+                param_name: {
+                    'type': "The value(s) should be integers, please configure its type as 'int'."
+                }
+            }
+        }
+        err = OptimizerConfig().validate(config_dict)
+        assert expected_err == err