Merge pull request !755 from gzhcv/uttags/v1.1.0
| @@ -1,112 +0,0 @@ | |||||
| # Copyright 2020 Huawei Technologies Co., Ltd | |||||
| # | |||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| # you may not use this file except in compliance with the License. | |||||
| # You may obtain a copy of the License at | |||||
| # | |||||
| # http://www.apache.org/licenses/LICENSE-2.0 | |||||
| # | |||||
| # Unless required by applicable law or agreed to in writing, software | |||||
| # distributed under the License is distributed on an "AS IS" BASIS, | |||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| # See the License for the specific language governing permissions and | |||||
| # limitations under the License. | |||||
| # ============================================================================ | |||||
| """Profiler utils.""" | |||||
| import os | |||||
| import re | |||||
def fwrite_format(output_data_path, data_source=None, is_print=False, is_start=False):
    """
    Write data to the output file.

    Args:
        output_data_path (str): The output file path of the data.
        data_source (str): The data to write. A value starting with "title:"
            is rendered as a '=' banner line. If None, nothing is written.
        is_print (bool): Whether to also print the data to stdout.
        is_start (bool): Whether this is the first line of the output file;
            removes the old file first if True.
    """
    if is_start is True and os.path.exists(output_data_path):
        os.remove(output_data_path)
    if data_source is None:
        # Guard the default argument: the original code raised
        # AttributeError on None.startswith here.
        return
    if data_source.startswith("title:"):
        title_label = '=' * 20
        data_source = title_label + data_source[6:] + title_label
    with open(output_data_path, 'a+') as output_file:
        output_file.write(data_source)
        output_file.write("\n")
    if is_print:
        print(data_source)
def get_log_slice_id(file_name):
    """
    Extract the numeric slice id that follows 'slice_' in a file name.

    Args:
        file_name (str): A log file name such as 'hwts.log.data.45.dev.slice_0'.

    Returns:
        int, the slice id.

    Raises:
        IndexError: If the file name contains no 'slice_<number>' part.
    """
    slice_ids = re.findall(r'(?<=slice_)\d+', file_name)
    # The lookbehind pattern already captures digit-only strings, so the
    # original second re.findall(r'\d+', ...) pass was redundant.
    return int(slice_ids[0])
def get_file_join_name(input_path, file_name):
    """
    Search files under the special path, and will join all the files to one file.

    Args:
        input_path (str): The source path, will search files under it.
        file_name (str): The target of the filename, such as 'hwts.log.data.45.dev'.

    Returns:
        str, the join file name, or '' when no matching file is found.
    """
    file_join_name = ''
    input_path = os.path.realpath(input_path)
    if os.path.exists(input_path):
        # Collect every slice of the target file, skipping helper artifacts.
        name_list = [
            name for name in os.listdir(input_path)
            if file_name in name
            and not name.endswith(('.done', '.join', '.zip'))
        ]
        # Order slices by their numeric slice id before joining.
        name_list.sort(key=get_log_slice_id)
        if len(name_list) == 1:
            file_join_name = os.path.join(input_path, name_list[0])
        elif len(name_list) > 1:
            file_join_name = os.path.join(input_path, '%s.join' % file_name)
            if os.path.exists(file_join_name):
                os.remove(file_join_name)
            with open(file_join_name, 'ab') as joined_file:
                for name in name_list:
                    with open(os.path.join(input_path, name), 'rb') as slice_file:
                        joined_file.write(slice_file.read())
    return file_join_name
def get_file_names(input_path, file_name):
    """
    Search files under the special path.

    Args:
        input_path (str): The source path, will search files under it.
        file_name (str): The target of the filename, such as 'host_start_log'.

    Returns:
        list, file name list containing at most the first match found.
    """
    input_path = os.path.realpath(input_path)
    matched_names = []
    if os.path.exists(input_path):
        for candidate in os.listdir(input_path):
            is_excluded = candidate.endswith('.done') or candidate.endswith('.zip')
            if file_name in candidate and not is_excluded:
                matched_names.append(candidate)
                # Only the first matching file is collected.
                break
    return matched_names
| @@ -61,8 +61,8 @@ class TestValidate: | |||||
| @pytest.mark.platform_arm_ascend_training | @pytest.mark.platform_arm_ascend_training | ||||
| @pytest.mark.platform_x86_gpu_training | @pytest.mark.platform_x86_gpu_training | ||||
| @pytest.mark.platform_x86_ascend_training | @pytest.mark.platform_x86_ascend_training | ||||
| def test_validate_condition_exception(self): | |||||
| """Test the exception of validate condition.""" | |||||
| def test_validate_op_type_exception(self): | |||||
| """Test the operate type exception.""" | |||||
| condition = "not a dict" | condition = "not a dict" | ||||
| exception_message = 'Param type error. Invalid search_condition type, it should be dict.' | exception_message = 'Param type error. Invalid search_condition type, it should be dict.' | ||||
| with pytest.raises(ProfilerParamTypeErrorException) as exc_info: | with pytest.raises(ProfilerParamTypeErrorException) as exc_info: | ||||
| @@ -70,7 +70,6 @@ class TestValidate: | |||||
| assert exc_info.value.error_code == '50546082' | assert exc_info.value.error_code == '50546082' | ||||
| assert exc_info.value.message == exception_message | assert exc_info.value.message == exception_message | ||||
| # test the ProfilerOpTypeException | |||||
| condition_list = [{'op_type': "xxx"}, {}] | condition_list = [{'op_type': "xxx"}, {}] | ||||
| exception_message = "The op_type in search_condition error, The op_type must in " \ | exception_message = "The op_type in search_condition error, The op_type must in " \ | ||||
| "['aicpu_type','aicpu_detail', 'aicore_type', 'aicore_detail', "\ | "['aicpu_type','aicpu_detail', 'aicore_type', 'aicore_detail', "\ | ||||
| @@ -81,7 +80,14 @@ class TestValidate: | |||||
| assert exc_info.value.error_code == '50546183' | assert exc_info.value.error_code == '50546183' | ||||
| assert exc_info.value.message == exception_message | assert exc_info.value.message == exception_message | ||||
| # test the ProfilerGroupConditionException | |||||
| @pytest.mark.level0 | |||||
| @pytest.mark.env_single | |||||
| @pytest.mark.platform_x86_cpu | |||||
| @pytest.mark.platform_arm_ascend_training | |||||
| @pytest.mark.platform_x86_gpu_training | |||||
| @pytest.mark.platform_x86_ascend_training | |||||
| def test_validate_group_condition_exception(self): | |||||
| """test the group condition exception.""" | |||||
| condition_list = [ | condition_list = [ | ||||
| { | { | ||||
| 'op_type': 'aicpu_type', | 'op_type': 'aicpu_type', | ||||
| @@ -0,0 +1,66 @@ | |||||
| # Copyright 2020 Huawei Technologies Co., Ltd | |||||
| # | |||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| # you may not use this file except in compliance with the License. | |||||
| # You may obtain a copy of the License at | |||||
| # | |||||
| # http://www.apache.org/licenses/LICENSE-2.0 | |||||
| # | |||||
| # Unless required by applicable law or agreed to in writing, software | |||||
| # distributed under the License is distributed on an "AS IS" BASIS, | |||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| # See the License for the specific language governing permissions and | |||||
| # limitations under the License. | |||||
| # ============================================================================ | |||||
| """ | |||||
| Function: | |||||
| Test profiler to watch the performance of training. | |||||
| Usage: | |||||
| pytest tests/st/func/profiler | |||||
| """ | |||||
| import os | |||||
| import pytest | |||||
| from mindinsight.profiler.analyser.analyser_factory import AnalyserFactory | |||||
| from tests.st.func.profiler import PROFILER_DIR | |||||
| from tests.utils.tools import compare_result_with_file | |||||
class TestTimelineAnalyser:
    """Test timeline analyser module."""

    def setup_method(self):
        """Create the timeline analyser under test."""
        self.profiler = PROFILER_DIR
        self.device_id = 0
        self._analyser = AnalyserFactory.instance().get_analyser(
            'timeline', self.profiler, self.device_id)

    @pytest.mark.parametrize(
        "device_target, filename",
        [('gpu', 'gpu_timeline_display_{}.json'),
         ('ascend', 'ascend_timeline_display_{}.json')]
    )
    def test_get_display_timeline(self, device_target, filename):
        """Test the function of get timeline detail data for UI display."""
        expected_file = os.path.join(
            self.profiler, filename.format(self.device_id))
        compare_result_with_file(
            self._analyser.get_display_timeline(device_target), expected_file)

    @pytest.mark.parametrize(
        "device_target, filename",
        [('gpu', 'gpu_timeline_summary_{}.json'),
         ('ascend', 'ascend_timeline_summary_{}.json')]
    )
    def test_get_timeline_summary(self, device_target, filename):
        """Test the function of get timeline summary data for UI display."""
        expected_file = os.path.join(
            self.profiler, filename.format(self.device_id))
        compare_result_with_file(
            self._analyser.get_timeline_summary(device_target), expected_file)
| @@ -18,9 +18,9 @@ import pytest | |||||
| from mindinsight.profiler.common.exceptions.exceptions import \ | from mindinsight.profiler.common.exceptions.exceptions import \ | ||||
| ProfilerParamTypeErrorException, ProfilerDeviceIdException, \ | ProfilerParamTypeErrorException, ProfilerDeviceIdException, \ | ||||
| ProfilerGroupConditionException, ProfilerSortConditionException, \ | ProfilerGroupConditionException, ProfilerSortConditionException, \ | ||||
| ProfilerFilterConditionException | |||||
| ProfilerFilterConditionException, ProfilerOpTypeException | |||||
| from mindinsight.profiler.common.validator.validate import \ | from mindinsight.profiler.common.validator.validate import \ | ||||
| validate_minddata_pipeline_condition | |||||
| validate_minddata_pipeline_condition, validate_condition | |||||
| class TestMinddataPipelineCondition: | class TestMinddataPipelineCondition: | ||||
| @@ -210,3 +210,97 @@ class TestMinddataPipelineCondition: | |||||
| assert exc_info.value.error_code == '50546186' | assert exc_info.value.error_code == '50546186' | ||||
| assert exc_info.value.message == 'The filter_condition in search_condition error, ' \ | assert exc_info.value.message == 'The filter_condition in search_condition error, ' \ | ||||
| 'The item in filter value must be int.' | 'The item in filter value must be int.' | ||||
| class TestValidateCondition: | |||||
| """Test the function of validate condition.""" | |||||
| def test_validate_condition_normal(self): | |||||
| """Test the validate condition of normal input.""" | |||||
| op_type_list = ['aicpu_type', 'aicpu_detail', 'aicore_type', 'aicore_detail', | |||||
| 'gpu_op_type', 'gpu_op_info', 'gpu_cuda_activity'] | |||||
| sort_name_list = ['op_type', 'serial_number', 'op_type', 'op_name', | |||||
| 'op_type', 'op_side', 'name'] | |||||
| for idx, op_type in enumerate(op_type_list): | |||||
| condition = { | |||||
| 'device_id': '0', | |||||
| 'op_type': op_type, | |||||
| 'filter_condition': { | |||||
| 'op_id': 0 | |||||
| }, | |||||
| 'group_condition': { | |||||
| 'limit': 1, | |||||
| 'offset': 1 | |||||
| }, | |||||
| 'sort_condition': { | |||||
| 'name': sort_name_list[idx], | |||||
| 'type': 'ascending' | |||||
| } | |||||
| } | |||||
| validate_condition(condition) | |||||
| def test_validate_condition_param_type_error_exception(self): | |||||
| """Test the exception of parameter type error.""" | |||||
| condition = "not a dict" | |||||
| exception_message = 'Param type error. Invalid search_condition type, it should be dict.' | |||||
| with pytest.raises(ProfilerParamTypeErrorException) as exc_info: | |||||
| validate_condition(condition) | |||||
| assert exc_info.value.error_code == '50546082' | |||||
| assert exc_info.value.message == exception_message | |||||
| def test_validate_condition_op_type_exception(self): | |||||
| """Test the exception of profiler operation type.""" | |||||
| condition_list = [{'op_type': "xxx"}, {}] | |||||
| exception_message = "The op_type in search_condition error, The op_type must in " \ | |||||
| "['aicpu_type','aicpu_detail', 'aicore_type', 'aicore_detail', "\ | |||||
| "'gpu_op_type', 'gpu_op_info', 'gpu_cuda_activity']" | |||||
| for condition in condition_list: | |||||
| with pytest.raises(ProfilerOpTypeException) as exc_info: | |||||
| validate_condition(condition) | |||||
| assert exc_info.value.error_code == '50546183' | |||||
| assert exc_info.value.message == exception_message | |||||
| def test_validate_condition_group_exception(self): | |||||
| """Test the exception of group condition related.""" | |||||
| condition_list = [ | |||||
| { | |||||
| 'op_type': 'aicpu_type', | |||||
| 'group_condition': 0 | |||||
| }, | |||||
| { | |||||
| 'op_type': 'aicpu_type', | |||||
| 'group_condition': {'limit': True} | |||||
| }, | |||||
| { | |||||
| 'op_type': 'aicpu_type', | |||||
| 'group_condition': {'limit': 0} | |||||
| }, | |||||
| { | |||||
| 'op_type': 'aicpu_type', | |||||
| 'group_condition': {'offset': True} | |||||
| }, | |||||
| { | |||||
| 'op_type': 'aicpu_type', | |||||
| 'group_condition': {'offset': -1} | |||||
| }, | |||||
| { | |||||
| 'op_type': 'aicpu_type', | |||||
| 'group_condition': {'offset': 10000000} | |||||
| }, | |||||
| ] | |||||
| exception_message_list = [ | |||||
| "The group condition must be dict.", | |||||
| "The limit must be int.", | |||||
| "The limit must in [1, 100].", | |||||
| "The offset must be int.", | |||||
| "The offset must ge 0.", | |||||
| "The offset must le 1000000." | |||||
| ] | |||||
| exception_message_list = [ | |||||
| 'The group_condition in search_condition error, ' + message | |||||
| for message in exception_message_list | |||||
| ] | |||||
| for idx, condition in enumerate(condition_list): | |||||
| with pytest.raises(ProfilerGroupConditionException) as exc_info: | |||||
| validate_condition(condition) | |||||
| assert exc_info.value.error_code == '50546184' | |||||
| assert exc_info.value.message == exception_message_list[idx] | |||||