You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

densenet.py 8.5 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188
  1. import tensorflow as tf
  2. import tensorlayer as tl
  3. config = tf.compat.v1.ConfigProto()
  4. config.gpu_options.allow_growth = True
  5. session = tf.compat.v1.Session(config=config)
  6. class BottleNeck(tl.layers.Module):
  7. def __init__(self, growth_rate, drop_rate):
  8. super(BottleNeck, self).__init__()
  9. self.bn1 = tl.layers.BatchNorm()
  10. self.conv1 = tl.layers.Conv2d(n_filter=4 * growth_rate,
  11. filter_size=(1, 1),
  12. strides=(1,1),
  13. padding="SAME")
  14. self.bn2 = tl.layers.BatchNorm()
  15. self.conv2 = tl.layers.Conv2d(n_filter=growth_rate,
  16. filter_size=(3, 3),
  17. strides=(1,1),
  18. padding="SAME")
  19. self.dropout = tl.layers.Dropout(keep=drop_rate)
  20. self.listLayers = [self.bn1,
  21. tl.layers.PRelu(channel_shared=True),
  22. self.conv1,
  23. self.bn2,
  24. tl.layers.PRelu(channel_shared=True),
  25. self.conv2,
  26. self.dropout]
  27. def forward(self, x):
  28. y = x
  29. for layer in self.listLayers:
  30. y = layer(y)
  31. y = tf.keras.layers.concatenate([x, y], axis=-1)
  32. return y
  33. # 构建密集块
  34. class DenseBlock(tl.layers.Module):
  35. def __init__(self, num_layers, growth_rate, drop_rate=0.5):
  36. super(DenseBlock, self).__init__()
  37. self.num_layers = num_layers
  38. self.growth_rate = growth_rate
  39. self.drop_rate = drop_rate
  40. self.listLayers = []
  41. for _ in range(num_layers):
  42. self.listLayers.append(BottleNeck(growth_rate=self.growth_rate, drop_rate=self.drop_rate))
  43. def forward(self, x):
  44. for layer in self.listLayers:
  45. x = layer(x)
  46. return x
  47. # 构建过渡层
  48. class TransitionLayer(tl.layers.Module):
  49. def __init__(self, out_channels):
  50. super(TransitionLayer, self).__init__()
  51. self.bn = tl.layers.BatchNorm()
  52. self.conv = tl.layers.Conv2d(n_filter=out_channels,
  53. filter_size=(1, 1),
  54. strides=(1,1),
  55. padding="same")
  56. self.pool = tl.layers.MaxPool2d(filter_size=(2, 2),
  57. strides=(2,2),
  58. padding="SAME")
  59. def forward(self, inputs):
  60. x = self.bn(inputs)
  61. x = tl.relu(x)
  62. x = self.conv(x)
  63. x = self.pool(x)
  64. return x
  65. # DenseNet-121,169,201,264模型
  66. class DenseNet(tl.layers.Module):
  67. def __init__(self, num_init_features, growth_rate, block_layers, compression_rate, drop_rate):
  68. super(DenseNet, self).__init__()
  69. self.conv = tl.layers.Conv2d(n_filter=num_init_features,
  70. filter_size=(7, 7),
  71. strides=(2,2),
  72. padding="SAME")
  73. self.bn = tl.layers.BatchNorm()
  74. self.pool = tl.layers.MaxPool2d(filter_size=(3, 3),
  75. strides=(2,2),
  76. padding="SAME")
  77. self.num_channels = num_init_features
  78. self.dense_block_1 = DenseBlock(num_layers=block_layers[0], growth_rate=growth_rate, drop_rate=drop_rate)
  79. self.num_channels += growth_rate * block_layers[0]
  80. self.num_channels = compression_rate * self.num_channels
  81. self.transition_1 = TransitionLayer(out_channels=int(self.num_channels))
  82. self.dense_block_2 = DenseBlock(num_layers=block_layers[1], growth_rate=growth_rate, drop_rate=drop_rate)
  83. self.num_channels += growth_rate * block_layers[1]
  84. self.num_channels = compression_rate * self.num_channels
  85. self.transition_2 = TransitionLayer(out_channels=int(self.num_channels))
  86. self.dense_block_3 = DenseBlock(num_layers=block_layers[2], growth_rate=growth_rate, drop_rate=drop_rate)
  87. self.num_channels += growth_rate * block_layers[2]
  88. self.num_channels = compression_rate * self.num_channels
  89. self.transition_3 = TransitionLayer(out_channels=int(self.num_channels))
  90. self.dense_block_4 = DenseBlock(num_layers=block_layers[3], growth_rate=growth_rate, drop_rate=drop_rate)
  91. self.avgpool = tl.layers.GlobalMeanPool2d()
  92. self.fc = tl.layers.Dense(n_units=10,act=tl.softmax(logits=()))
  93. def forward(self, inputs):
  94. x = self.conv(inputs)
  95. x = self.bn(x)
  96. x = tl.relu(x)
  97. x = self.pool(x)
  98. x = self.dense_block_1(x)
  99. x = self.transition_1(x)
  100. x = self.dense_block_2(x)
  101. x = self.transition_2(x)
  102. x = self.dense_block_3(x)
  103. x = self.transition_3(x,)
  104. x = self.dense_block_4(x)
  105. x = self.avgpool(x)
  106. x = self.fc(x)
  107. return x
  108. # DenseNet-100模型
  109. class DenseNet_100(tl.layers.Module):
  110. def __init__(self, num_init_features, growth_rate, block_layers, compression_rate, drop_rate):
  111. super(DenseNet_100, self).__init__()
  112. self.conv = tl.layers.Conv2d(n_filter=num_init_features,
  113. filter_size=(7, 7),
  114. strides=(2,2),
  115. padding="SAME")
  116. self.bn = tl.layers.BatchNorm()
  117. self.pool = tl.layers.MaxPool2d(filter_size=(3, 3),
  118. strides=(2,2),
  119. padding="SAME")
  120. self.num_channels = num_init_features
  121. self.dense_block_1 = DenseBlock(num_layers=block_layers[0], growth_rate=growth_rate, drop_rate=drop_rate)
  122. self.num_channels += growth_rate * block_layers[0]
  123. self.num_channels = compression_rate * self.num_channels
  124. self.transition_1 = TransitionLayer(out_channels=int(self.num_channels))
  125. self.dense_block_2 = DenseBlock(num_layers=block_layers[1], growth_rate=growth_rate, drop_rate=drop_rate)
  126. self.num_channels += growth_rate * block_layers[1]
  127. self.num_channels = compression_rate * self.num_channels
  128. self.transition_2 = TransitionLayer(out_channels=int(self.num_channels))
  129. self.dense_block_3 = DenseBlock(num_layers=block_layers[2], growth_rate=growth_rate, drop_rate=drop_rate)
  130. self.num_channels += growth_rate * block_layers[2]
  131. self.num_channels = compression_rate * self.num_channels
  132. self.transition_3 = TransitionLayer(out_channels=int(self.num_channels))
  133. self.avgpool = tl.layers.GlobalMeanPool2d()
  134. self.fc = tl.layers.Dense(n_units=10,act=tl.softmax(logits=()))
  135. def forward(self, inputs):
  136. x = self.conv(inputs)
  137. x = self.bn(x)
  138. x = tl.relu(x)
  139. x = self.pool(x)
  140. x = self.dense_block_1(x)
  141. x = self.transition_1(x)
  142. x = self.dense_block_2(x)
  143. x = self.transition_2(x)
  144. x = self.dense_block_3(x)
  145. x = self.transition_3(x,)
  146. x = self.avgpool(x)
  147. # x = tl.layers.Dense(n_units=10,act=tl.softmax(logits=x))
  148. x = self.fc(x)
  149. return x
  150. def densenet(x):
  151. if x == 'densenet-121':
  152. return DenseNet(num_init_features=64, growth_rate=32, block_layers=[6, 12, 24, 16], compression_rate=0.5,
  153. drop_rate=0.5)
  154. elif x == 'densenet-169':
  155. return DenseNet(num_init_features=64, growth_rate=32, block_layers=[6 , 12, 32, 32], compression_rate=0.5,
  156. drop_rate=0.5)
  157. elif x == 'densenet-201':
  158. return DenseNet(num_init_features=64, growth_rate=32, block_layers=[6, 12, 48, 32], compression_rate=0.5,
  159. drop_rate=0.5)
  160. elif x == 'densenet-264':
  161. return DenseNet(num_init_features=64, growth_rate=32, block_layers=[6, 12, 64, 48], compression_rate=0.5,
  162. drop_rate=0.5)
  163. elif x=='densenet-100':
  164. return DenseNet_100(num_init_features=64, growth_rate=12, block_layers=[16, 16, 16], compression_rate=0.5,
  165. drop_rate=0.5)

TensorLayer 3.0 是一款兼容多种深度学习框架作为计算后端的深度学习库,计划兼容 TensorFlow、PyTorch、MindSpore、Paddle。