Browse Source

!914 add st for tuner and hyper config

From: @luopengting
Reviewed-by: @ouwenchang,@wenkai_dist
Signed-off-by: @wenkai_dist
tags/v1.1.0
mindspore-ci-bot Gitee 5 years ago
parent
commit
cb3fce4c74
3 changed files with 157 additions and 8 deletions
  1. +17
    -6
      tests/st/func/optimizer/targets/test_targets_restful_api.py
  2. +129
    -0
      tests/st/func/optimizer/test_hyper_config.py
  3. +11
    -2
      tests/utils/tools.py

+ 17
- 6
tests/st/func/optimizer/targets/test_targets_restful_api.py View File

@@ -14,26 +14,30 @@
# ============================================================================
"""Test targets restful api."""
import json
import os
import shutil

import pytest

from ..conftest import MOCK_DATA_MANAGER, SUMMARY_BASE_DIR
from .....utils.lineage_writer import LineageWriter
from .....utils.lineage_writer.base import Metadata
from tests.utils.lineage_writer import LineageWriter
from tests.utils.lineage_writer.base import Metadata
from tests.st.func.optimizer.conftest import MOCK_DATA_MANAGER, SUMMARY_BASE_DIR

BASE_URL = '/v1/mindinsight/optimizer/targets/search'


class TestTargets:
"""Test Histograms."""
"""Test Targets."""

def setup_class(self):
"""Setup class."""
learning_rate = [0.01, 0.001, 0.02, 0.04, 0.05]
acc = [0.8, 0.9, 0.8, 0.7, 0.6]
self._train_ids = []
train_id_prefix = 'train_'
params = {}
for i, lr in enumerate(learning_rate):
train_id = f'./train_{i + 1}'
train_id = f'./{train_id_prefix}{i + 1}'
self._train_ids.append(train_id)
params.update({
train_id: {
@@ -43,10 +47,17 @@ class TestTargets:
})

lineage_writer = LineageWriter(SUMMARY_BASE_DIR)
lineage_writer.create_summaries(train_job_num=5, params=params)
lineage_writer.create_summaries(train_id_prefix=train_id_prefix, train_job_num=5, params=params)

MOCK_DATA_MANAGER.start_load_data().join()

def teardown_class(self):
"""Delete the summary directory."""
for train_id in self._train_ids:
summary_dir = os.path.join(SUMMARY_BASE_DIR, train_id)
if os.path.exists(summary_dir):
shutil.rmtree(summary_dir)

@pytest.mark.level0
@pytest.mark.env_single
@pytest.mark.platform_x86_cpu


+ 129
- 0
tests/st/func/optimizer/test_hyper_config.py View File

@@ -0,0 +1,129 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Test tuner and hyper config."""
import json
import os
import shutil

import pytest

from mindinsight.optimizer.tuner import Tuner
from mindinsight.optimizer.hyper_config import HyperConfig

from tests.utils.lineage_writer import LineageWriter
from tests.utils.lineage_writer.base import Metadata
from tests.utils.tools import convert_dict_to_yaml
from tests.st.func.optimizer.conftest import SUMMARY_BASE_DIR


def _create_summaries(summary_base_dir):
    """Write lineage summaries for five mock train jobs and return their train ids."""
    prefix = 'train_'
    lr_values = [0.01, 0.001, 0.02, 0.04, 0.05]
    acc_values = [0.8, 0.9, 0.8, 0.7, 0.6]
    momentum_values = [0.8, 0.9, 0.8, 0.9, 0.7]

    train_ids = []
    params = {}
    # Job indices are 1-based to match the train_1 .. train_5 directory names.
    for index, (lr, acc, momentum) in enumerate(
            zip(lr_values, acc_values, momentum_values), start=1):
        train_id = f'./{prefix}{index}'
        train_ids.append(train_id)
        params[train_id] = {
            'train': {
                Metadata.learning_rate: lr
            },
            'eval': {
                Metadata.metrics: json.dumps({'acc': acc}),
                'user_defined_info': {'momentum': momentum}
            }
        }

    writer = LineageWriter(summary_base_dir)
    writer.create_summaries(train_id_prefix=prefix, train_job_num=5, params=params)

    return train_ids


def _prepare_script_and_yaml(output_dir, script_name='test.py', yaml_name='config.yaml'):
    """Create an empty training script and a tuner yaml config; return both paths."""
    script_path = os.path.join(output_dir, script_name)
    # An empty file is enough here: the tuner only needs an existing script path.
    with open(script_path, 'w'):
        pass

    config = {
        'command': 'python %s' % script_path,
        'summary_base_dir': SUMMARY_BASE_DIR,
        'tuner': {
            'name': 'gp',
        },
        'target': {
            'group': 'metric',
            'name': 'acc',
            'goal': 'maximize'
        },
        'parameters': {
            'learning_rate': {
                'bounds': [0.0001, 0.01],
                'type': 'float'
            },
            'momentum': {
                'choice': [0.8, 0.9]
            }
        }
    }
    convert_dict_to_yaml(config, output_dir, yaml_name)

    yaml_path = os.path.join(output_dir, yaml_name)
    return script_path, yaml_path


class TestHyperConfig:
    """System test for HyperConfig working together with Tuner."""

    def setup_class(self):
        """Generate mock summaries plus the script/yaml pair the tuner consumes."""
        self._generated_file_path = []
        self._train_ids = _create_summaries(SUMMARY_BASE_DIR)
        script_path, self._yaml_path = _prepare_script_and_yaml(SUMMARY_BASE_DIR)
        self._generated_file_path.extend([script_path, self._yaml_path])

    def teardown_class(self):
        """Remove every summary directory and generated file created in setup."""
        for train_id in self._train_ids:
            summary_dir = os.path.join(SUMMARY_BASE_DIR, train_id)
            if os.path.exists(summary_dir):
                shutil.rmtree(summary_dir)
        for path in self._generated_file_path:
            if os.path.exists(path):
                os.remove(path)

    @pytest.mark.level0
    @pytest.mark.env_single
    @pytest.mark.platform_x86_cpu
    @pytest.mark.platform_arm_ascend_training
    @pytest.mark.platform_x86_gpu_training
    @pytest.mark.platform_x86_ascend_training
    @pytest.mark.usefixtures("init_summary_logs")
    def test_tuner_success(self):
        """Tuner.optimize should yield params drawn from the configured search space."""
        tuner = Tuner(self._yaml_path)
        tuner.optimize()

        hyper_config = HyperConfig()
        params = hyper_config.params

        assert list(params.keys()) == ['learning_rate', 'momentum']
        # learning_rate comes from bounds [0.0001, 0.01); momentum from a choice list.
        assert 0.0001 <= params.learning_rate < 0.01
        assert params.momentum in [0.8, 0.9]

        assert list(hyper_config.custom_lineage_data.keys()) == ['momentum']

+ 11
- 2
tests/utils/tools.py View File

@@ -16,12 +16,13 @@
Description: This file is used for some common util.
"""
import io
import json
import os
import shutil
import json
from pathlib import Path

from urllib.parse import urlencode

import yaml
import numpy as np
from PIL import Image

@@ -161,3 +162,11 @@ def get_relative_path(path, base_path):
if r_path == ".":
r_path = ""
return os.path.join("./", r_path)


def convert_dict_to_yaml(value: dict, output_dir, file_name='config.yaml'):
    """Serialize a dict into a yaml file under ``output_dir``.

    Args:
        value (dict): Data to serialize. Must contain only plain YAML-compatible
            types (dict/list/str/number/bool/None); ``safe_dump`` raises for
            anything else instead of silently emitting python-specific tags.
        output_dir (str): Directory in which the yaml file is written.
        file_name (str): Name of the yaml file. Default: 'config.yaml'.

    Returns:
        str, path of the written yaml file.
    """
    yaml_file = os.path.join(output_dir, file_name)
    with open(yaml_file, 'w', encoding='utf-8') as file:
        # safe_dump keeps the output portable (no !!python/... tags);
        # default_flow_style=False pins block style across PyYAML versions.
        yaml.safe_dump(value, file, default_flow_style=False)
    return yaml_file

Loading…
Cancel
Save