
test_onnx.py
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""ut for model serialize(save/load)"""
import os
import stat

import numpy as np
import pytest

import mindspore.nn as nn
from mindspore import context
from mindspore.common.parameter import Parameter
from mindspore.common.tensor import Tensor
from mindspore.ops import operations as P
from mindspore.train.serialization import export

context.set_context(mode=context.GRAPH_MODE)

def is_enable_onnxruntime():
    val = os.getenv("ENABLE_ONNXRUNTIME", "False")
    if val in ('ON', 'on', 'TRUE', 'True', 'true'):
        return True
    return False


run_on_onnxruntime = pytest.mark.skipif(not is_enable_onnxruntime(), reason="Only support running on onnxruntime")

def setup_module():
    pass


def teardown_module():
    cur_dir = os.path.dirname(os.path.realpath(__file__))
    for filename in os.listdir(cur_dir):
        if filename.find('ms_output_') == 0 and filename.find('.pb') > 0:
            # delete temp files generated by running the ut
            os.chmod(filename, stat.S_IWRITE)
            os.remove(filename)

class BatchNormTester(nn.Cell):
    """used to test exporting network in training mode in onnx format"""

    def __init__(self, num_features):
        super(BatchNormTester, self).__init__()
        self.bn = nn.BatchNorm2d(num_features)

    def construct(self, x):
        return self.bn(x)

def test_batchnorm_train_onnx_export():
    """test that the onnx export interface does not modify the trainable flag of a network"""
    input_ = Tensor(np.ones([1, 3, 32, 32]).astype(np.float32) * 0.01)
    net = BatchNormTester(3)
    net.set_train()
    if not net.training:
        raise ValueError('network is not in training mode')
    onnx_file = 'batch_norm.onnx'
    export(net, input_, file_name=onnx_file, file_format='ONNX')
    if not net.training:
        raise ValueError('network is not in training mode')

    # check existence of exported onnx file and delete it
    assert os.path.exists(onnx_file)
    os.chmod(onnx_file, stat.S_IWRITE)
    os.remove(onnx_file)

class LeNet5(nn.Cell):
    """LeNet5 definition"""

    def __init__(self):
        super(LeNet5, self).__init__()
        self.conv1 = nn.Conv2d(1, 6, 5, pad_mode='valid')
        self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')
        self.fc1 = nn.Dense(16 * 5 * 5, 120)
        self.fc2 = nn.Dense(120, 84)
        self.fc3 = nn.Dense(84, 10)
        self.relu = nn.ReLU()
        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        self.flatten = P.Flatten()

    def construct(self, x):
        x = self.max_pool2d(self.relu(self.conv1(x)))
        x = self.max_pool2d(self.relu(self.conv2(x)))
        x = self.flatten(x)
        x = self.relu(self.fc1(x))
        x = self.relu(self.fc2(x))
        x = self.fc3(x)
        return x

class DefinedNet(nn.Cell):
    """simple Net definition with maxpoolwithargmax."""

    def __init__(self, num_classes=10):
        super(DefinedNet, self).__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=0, weight_init="zeros")
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU()
        self.maxpool = P.MaxPoolWithArgmax(padding="same", ksize=2, strides=2)
        self.flatten = nn.Flatten()
        self.fc = nn.Dense(int(56 * 56 * 64), num_classes)

    def construct(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        # MaxPoolWithArgmax returns (output, argmax); only the pooled output is used here
        x, argmax = self.maxpool(x)
        x = self.flatten(x)
        x = self.fc(x)
        return x

class DepthwiseConv2dAndReLU6(nn.Cell):
    """Net for testing DepthwiseConv2d and ReLU6"""

    def __init__(self, input_channel, kernel_size):
        super(DepthwiseConv2dAndReLU6, self).__init__()
        weight_shape = [1, input_channel, kernel_size, kernel_size]
        from mindspore.common.initializer import initializer
        self.weight = Parameter(initializer('ones', weight_shape), name='weight')
        self.depthwise_conv = P.DepthwiseConv2dNative(channel_multiplier=1, kernel_size=(kernel_size, kernel_size))
        self.relu6 = nn.ReLU6()

    def construct(self, x):
        x = self.depthwise_conv(x, self.weight)
        x = self.relu6(x)
        return x

class DeepFMOpNet(nn.Cell):
    """Net definition with Gatherv2 and Tile and Square."""

    def __init__(self):
        super(DeepFMOpNet, self).__init__()
        self.gather = P.GatherV2()
        self.square = P.Square()
        self.tile = P.Tile()

    def construct(self, x, y):
        x = self.tile(x, (1000, 1))
        x = self.square(x)
        x = self.gather(x, y, 0)
        return x

# generate mindspore Tensor by shape and numpy datatype
def gen_tensor(shape, dtype=np.float32):
    return Tensor(np.ones(shape).astype(dtype))


# ut configs as triples: (ut_name, network, network input)
net_cfgs = [
    ('lenet', LeNet5(), gen_tensor([1, 1, 32, 32])),
    ('maxpoolwithargmax', DefinedNet(), gen_tensor([1, 3, 224, 224])),
    ('depthwiseconv_relu6', DepthwiseConv2dAndReLU6(3, kernel_size=3), gen_tensor([1, 3, 32, 32])),
    ('deepfm_ops', DeepFMOpNet(), (gen_tensor([1, 1]), gen_tensor([1000, 1], dtype=np.int32)))
]

def get_id(cfg):
    _ = cfg
    return list(map(lambda x: x[0], net_cfgs))


# use `pytest test_onnx.py::test_onnx_export[name]` or `pytest test_onnx.py::test_onnx_export -k name` to run a single ut
@pytest.mark.parametrize('name, net, inp', net_cfgs, ids=get_id(net_cfgs))
def test_onnx_export(name, net, inp):
    onnx_file = name + ".onnx"
    if isinstance(inp, (tuple, list)):
        # multi-input networks get their inputs unpacked as separate positional arguments
        export(net, *inp, file_name=onnx_file, file_format='ONNX')
    else:
        export(net, inp, file_name=onnx_file, file_format='ONNX')

    # check existence of exported onnx file and delete it
    assert os.path.exists(onnx_file)
    os.chmod(onnx_file, stat.S_IWRITE)
    os.remove(onnx_file)

@run_on_onnxruntime
@pytest.mark.parametrize('name, net, inp', net_cfgs, ids=get_id(net_cfgs))
def test_onnx_export_load_run(name, net, inp):
    onnx_file = name + ".onnx"
    export(net, inp, file_name=onnx_file, file_format='ONNX')

    import onnx
    import onnxruntime as ort

    print('--------------------- onnx load ---------------------')
    # Load the ONNX model
    model = onnx.load(onnx_file)
    # Check that the IR is well formed
    onnx.checker.check_model(model)
    # Print a human readable representation of the graph
    g = onnx.helper.printable_graph(model.graph)
    print(g)

    print('------------------ onnxruntime run ------------------')
    ort_session = ort.InferenceSession(onnx_file)
    input_map = {'x': inp.asnumpy()}
    # provide only input x to run the model
    outputs = ort_session.run(None, input_map)
    print(outputs[0])
    # overwrite the default weights and run the model again
    for item in net.trainable_params():
        default_value = item.default_input.asnumpy()
        input_map[item.name] = np.ones(default_value.shape, dtype=default_value.dtype)
    outputs = ort_session.run(None, input_map)
    print(outputs[0])

    # check existence of exported onnx file and delete it
    assert os.path.exists(onnx_file)
    os.chmod(onnx_file, stat.S_IWRITE)
    os.remove(onnx_file)
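
The runtime test above only prints the ONNX Runtime outputs. A minimal sketch of how the exported model could also be checked numerically against the MindSpore network is shown below; it reuses the LeNet5 class, the export import, and the 'x' input name from the file, while the helper name, output file name, and tolerances are assumptions chosen for illustration rather than part of the original tests.

    import numpy as np
    import onnxruntime as ort
    from mindspore import Tensor

    def check_lenet_onnx_matches_mindspore():
        # hypothetical helper, not part of test_onnx.py; assumes LeNet5 and export are in scope
        net = LeNet5()
        net.set_train(False)
        inp = Tensor(np.ones([1, 1, 32, 32]).astype(np.float32))
        export(net, inp, file_name='lenet_check.onnx', file_format='ONNX')

        ms_out = net(inp).asnumpy()                      # forward pass in MindSpore
        sess = ort.InferenceSession('lenet_check.onnx')  # load the exported model
        ort_out = sess.run(None, {'x': inp.asnumpy()})[0]

        # float32 tolerances are a guess and would need tuning per network and backend
        np.testing.assert_allclose(ms_out, ort_out, rtol=1e-3, atol=1e-5)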