From 041a3d36cf86965437674a37620a67ee8d3bacf0 Mon Sep 17 00:00:00 2001
From: moran
Date: Fri, 30 Apr 2021 10:00:13 +0800
Subject: [PATCH] record traceback to log when third party lib imported
 incorrectly & add FAQ for third party lib imported incorrectly

---
 mindinsight/mindconverter/README.md                     | 4 ++++
 mindinsight/mindconverter/README_CN.md                  | 4 ++++
 .../mindconverter/graph_based_converter/common/utils.py | 5 +++--
 3 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/mindinsight/mindconverter/README.md b/mindinsight/mindconverter/README.md
index 87640c7d..9ef1379f 100644
--- a/mindinsight/mindconverter/README.md
+++ b/mindinsight/mindconverter/README.md
@@ -351,6 +351,10 @@ print(api_implementation.Type())
 Q4. While converting .pb file to MindSpore script, what may be the cause of error code 1000001 with ensuring `model_file`, `shape`, `iput_nodes` and `output_nodes` set right and third party requirements installed correctly?
 
 > Answer: Make sure that the TensorFlow version to generate .pb file is no higher than that to convert .pb file, avoiding the conflict which caused by using low version TensorFlow to parse .pb file generated by high version one.
+Q5. What should I do to deal with the exception `[ERROR] MINDCONVERTER: [BaseConverterError] code: 0000000, msg: {python_home}/lib/libgomp.so.1: cannot allocate memory in static TLS block`?
+
+> Answer: In most cases, the problem is caused by an incorrectly exported environment variable. Please set `export LD_PRELOAD={python_home}/lib/libgomp.so.1.0.0`, then try to rerun MindConverter.
+
 ## Appendix
 
 ### TensorFlow Pb model exporting
diff --git a/mindinsight/mindconverter/README_CN.md b/mindinsight/mindconverter/README_CN.md
index 38453ccc..71cc1ac2 100644
--- a/mindinsight/mindconverter/README_CN.md
+++ b/mindinsight/mindconverter/README_CN.md
@@ -362,6 +362,10 @@ Q4. 
使用.pb文件进行转换时,已确定`model_file`,`shape`,`input_no > 答:请检查生成该.pb文件所使用的TensorFlow版本不高于用于转换时安装的TensorFlow版本,避免由于旧版本TensorFlow无法解析新版本生成的.pb文件,而导致的模型文件解析失败。 +Q5. 出现报错信息`[ERROR] MINDCONVERTER: [BaseConverterError] code: 0000000, msg: {python_home}/lib/libgomp.so.1: cannot allocate memory in static TLS block`时,应该怎么处理? + +> 答:该问题通常是由于环境变量导入不正确导致的。建议用户设置`export LD_PRELOAD={python_home}/lib/libgomp.so.1.0.0`这一环境变量,然后重新尝试进行转换。 + ## 附录 ### TensorFlow Pb模型导出 diff --git a/mindinsight/mindconverter/graph_based_converter/common/utils.py b/mindinsight/mindconverter/graph_based_converter/common/utils.py index 2b0e6a75..379cac3c 100644 --- a/mindinsight/mindconverter/graph_based_converter/common/utils.py +++ b/mindinsight/mindconverter/graph_based_converter/common/utils.py @@ -23,6 +23,7 @@ from typing import List, Tuple, Mapping import numpy as np +from mindinsight.mindconverter.common.log import logger as log from mindinsight.mindconverter.common.exceptions import ScriptGenerationError, ReportGenerationError, \ CheckPointGenerationError, WeightMapGenerationError, ModelLoadingError, OnnxModelSaveError from mindinsight.mindconverter.graph_based_converter.constant import SEPARATOR_IN_ONNX_OP, FrameworkType, \ @@ -67,7 +68,8 @@ def check_dependency_integrity(*packages): for pkg in packages: import_module(pkg) return True - except ImportError: + except ImportError as e: + log.exception(e) return False @@ -120,7 +122,6 @@ def fetch_output_from_onnx_model(model, model_path: str, feed_dict: dict, output os.remove(tmp_file) raise OnnxModelSaveError("Onnx model save failed, {}".format(str(error))) - try: sess = ort.InferenceSession(path_or_bytes=tmp_file) fetched_res = sess.run(output_names=output_nodes, input_feed=feed_dict)