You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

mobilenetv2_combined.py 3.7 kB

5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108
  1. """mobile net v2"""
  2. from mindspore import nn
  3. from mindspore.ops import operations as P
  4. def make_divisible(input_x, div_by=8):
  5. return int((input_x + div_by) // div_by)
  6. def _conv_bn(in_channel,
  7. out_channel,
  8. ksize,
  9. stride=1):
  10. """Get a conv2d batchnorm and relu layer."""
  11. return nn.SequentialCell(
  12. [nn.Conv2dBnAct(in_channel,
  13. out_channel,
  14. kernel_size=ksize,
  15. stride=stride,
  16. batchnorm=True)])
  17. class InvertedResidual(nn.Cell):
  18. def __init__(self, inp, oup, stride, expend_ratio):
  19. super(InvertedResidual, self).__init__()
  20. self.stride = stride
  21. assert stride in [1, 2]
  22. hidden_dim = int(inp * expend_ratio)
  23. self.use_res_connect = self.stride == 1 and inp == oup
  24. if expend_ratio == 1:
  25. self.conv = nn.SequentialCell([
  26. nn.Conv2dBnAct(hidden_dim,
  27. hidden_dim,
  28. 3,
  29. stride,
  30. group=hidden_dim,
  31. batchnorm=True,
  32. activation='relu6'),
  33. nn.Conv2dBnAct(hidden_dim, oup, 1, 1,
  34. batchnorm=True)
  35. ])
  36. else:
  37. self.conv = nn.SequentialCell([
  38. nn.Conv2dBnAct(inp, hidden_dim, 1, 1,
  39. batchnorm=True,
  40. activation='relu6'),
  41. nn.Conv2dBnAct(hidden_dim,
  42. hidden_dim,
  43. 3,
  44. stride,
  45. group=hidden_dim,
  46. batchnorm=True,
  47. activation='relu6'),
  48. nn.Conv2dBnAct(hidden_dim, oup, 1, 1,
  49. batchnorm=True)
  50. ])
  51. self.add = P.TensorAdd()
  52. def construct(self, input_x):
  53. out = self.conv(input_x)
  54. if self.use_res_connect:
  55. out = self.add(input_x, out)
  56. return out
  57. class MobileNetV2(nn.Cell):
  58. def __init__(self, num_class=1000, input_size=224, width_mul=1.):
  59. super(MobileNetV2, self).__init__()
  60. _ = input_size
  61. block = InvertedResidual
  62. input_channel = 32
  63. last_channel = 1280
  64. inverted_residual_setting = [
  65. [1, 16, 1, 1],
  66. [6, 24, 2, 2],
  67. [6, 32, 3, 2],
  68. [6, 64, 4, 2],
  69. [6, 96, 3, 1],
  70. [6, 160, 3, 2],
  71. [6, 230, 1, 1],
  72. ]
  73. if width_mul > 1.0:
  74. last_channel = make_divisible(last_channel * width_mul)
  75. self.last_channel = last_channel
  76. features = [_conv_bn(3, input_channel, 3, 2)]
  77. for t, c, n, s in inverted_residual_setting:
  78. out_channel = make_divisible(c * width_mul) if t > 1 else c
  79. for i in range(n):
  80. if i == 0:
  81. features.append(block(input_channel, out_channel, s, t))
  82. else:
  83. features.append(block(input_channel, out_channel, 1, t))
  84. input_channel = out_channel
  85. features.append(_conv_bn(input_channel, self.last_channel, 1))
  86. self.features = nn.SequentialCell(features)
  87. self.mean = P.ReduceMean(keep_dims=False)
  88. self.classifier = nn.DenseBnAct(self.last_channel, num_class)
  89. def construct(self, input_x):
  90. out = input_x
  91. out = self.features(out)
  92. out = self.mean(out, (2, 3))
  93. out = self.classifier(out)
  94. return out