
DenseNet.py

import tensorflow as tf


class BottleNeck(tf.keras.layers.Layer):
    """DenseNet bottleneck unit: BN-ReLU-Conv(1x1) -> BN-ReLU-Conv(3x3) -> Dropout."""
    def __init__(self, growth_rate, drop_rate):
        super(BottleNeck, self).__init__()
        self.bn1 = tf.keras.layers.BatchNormalization()
        self.conv1 = tf.keras.layers.Conv2D(filters=4 * growth_rate,
                                            kernel_size=(1, 1),
                                            strides=1,
                                            padding="same")
        self.bn2 = tf.keras.layers.BatchNormalization()
        self.conv2 = tf.keras.layers.Conv2D(filters=growth_rate,
                                            kernel_size=(3, 3),
                                            strides=1,
                                            padding="same")
        self.dropout = tf.keras.layers.Dropout(rate=drop_rate)
        self.listLayers = [self.bn1,
                           tf.keras.layers.Activation("relu"),
                           self.conv1,
                           self.bn2,
                           tf.keras.layers.Activation("relu"),
                           self.conv2,
                           self.dropout]

    def call(self, x):
        y = x
        for layer in self.listLayers:  # listLayers is a plain Python list, so iterate it directly
            y = layer(y)
        # Dense connectivity: concatenate the new growth_rate feature maps with the unit's input.
        y = tf.keras.layers.concatenate([x, y], axis=-1)
        return y
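
# Illustrative sketch (not part of the original file): a quick shape check showing that one
# BottleNeck appends exactly growth_rate feature maps to whatever it receives. The input shape
# below is an arbitrary example chosen for the demonstration.
_demo_bottleneck = BottleNeck(growth_rate=32, drop_rate=0.5)
print(_demo_bottleneck(tf.random.normal((1, 8, 8, 64))).shape)  # expected: (1, 8, 8, 96) = 64 input + 32 new channels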

class DenseBlock(tf.keras.layers.Layer):
    """A stack of num_layers BottleNeck units; channels grow by growth_rate per unit."""
    def __init__(self, num_layers, growth_rate, drop_rate=0.5):
        super(DenseBlock, self).__init__()
        self.num_layers = num_layers
        self.growth_rate = growth_rate
        self.drop_rate = drop_rate
        self.listLayers = []
        for _ in range(num_layers):
            self.listLayers.append(BottleNeck(growth_rate=self.growth_rate, drop_rate=self.drop_rate))

    def call(self, x):
        for layer in self.listLayers:  # iterate the plain list of BottleNeck units
            x = layer(x)
        return x
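
# Illustrative sketch (not part of the original file): because every BottleNeck concatenates its
# output onto its input, a DenseBlock's output channel count is input_channels + num_layers * growth_rate.
_demo_block = DenseBlock(num_layers=4, growth_rate=32, drop_rate=0.5)
print(_demo_block(tf.random.normal((1, 8, 8, 64))).shape)  # expected: (1, 8, 8, 192) = 64 + 4 * 32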

class TransitionLayer(tf.keras.layers.Layer):
    """Transition between dense blocks: 1x1 conv to compress channels, then 2x2 pooling to halve H and W."""
    def __init__(self, out_channels):
        super(TransitionLayer, self).__init__()
        self.bn = tf.keras.layers.BatchNormalization()
        self.conv = tf.keras.layers.Conv2D(filters=out_channels,
                                           kernel_size=(1, 1),
                                           strides=1,
                                           padding="same")
        self.pool = tf.keras.layers.MaxPool2D(pool_size=(2, 2),
                                              strides=2,
                                              padding="same")

    def call(self, inputs):
        x = self.bn(inputs)
        x = tf.keras.activations.relu(x)
        x = self.conv(x)
        x = self.pool(x)
        return x
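
# Illustrative sketch (not part of the original file): the transition layer compresses the channel
# count to out_channels with the 1x1 convolution and halves the spatial resolution with the pooling.
_demo_transition = TransitionLayer(out_channels=96)
print(_demo_transition(tf.random.normal((1, 8, 8, 192))).shape)  # expected: (1, 4, 4, 96)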

class DenseNet(tf.keras.Model):
    """DenseNet: stem conv -> 4 dense blocks separated by transition layers -> global pooling -> softmax."""
    def __init__(self, num_init_features, growth_rate, block_layers, compression_rate, drop_rate):
        super(DenseNet, self).__init__()
        self.conv = tf.keras.layers.Conv2D(filters=num_init_features,
                                           kernel_size=(7, 7),
                                           strides=2,
                                           padding="same")
        self.bn = tf.keras.layers.BatchNormalization()
        self.pool = tf.keras.layers.MaxPool2D(pool_size=(3, 3),
                                              strides=2,
                                              padding="same")
        self.num_channels = num_init_features
        # Block 1: channels grow by growth_rate per layer, then the transition compresses them.
        self.dense_block_1 = DenseBlock(num_layers=block_layers[0], growth_rate=growth_rate, drop_rate=drop_rate)
        self.num_channels += growth_rate * block_layers[0]
        self.num_channels = compression_rate * self.num_channels
        self.transition_1 = TransitionLayer(out_channels=int(self.num_channels))
        # Block 2
        self.dense_block_2 = DenseBlock(num_layers=block_layers[1], growth_rate=growth_rate, drop_rate=drop_rate)
        self.num_channels += growth_rate * block_layers[1]
        self.num_channels = compression_rate * self.num_channels
        self.transition_2 = TransitionLayer(out_channels=int(self.num_channels))
        # Block 3
        self.dense_block_3 = DenseBlock(num_layers=block_layers[2], growth_rate=growth_rate, drop_rate=drop_rate)
        self.num_channels += growth_rate * block_layers[2]
        self.num_channels = compression_rate * self.num_channels
        self.transition_3 = TransitionLayer(out_channels=int(self.num_channels))
        # Block 4 has no transition; it is followed by global average pooling and the classifier.
        self.dense_block_4 = DenseBlock(num_layers=block_layers[3], growth_rate=growth_rate, drop_rate=drop_rate)
        self.avgpool = tf.keras.layers.GlobalAveragePooling2D()
        self.fc = tf.keras.layers.Dense(units=10,
                                        activation=tf.keras.activations.softmax)

    def call(self, inputs):
        x = self.conv(inputs)
        x = self.bn(x)
        x = tf.keras.activations.relu(x)
        x = self.pool(x)
        x = self.dense_block_1(x)
        x = self.transition_1(x)
        x = self.dense_block_2(x)
        x = self.transition_2(x)
        x = self.dense_block_3(x)
        x = self.transition_3(x)
        x = self.dense_block_4(x)
        x = self.avgpool(x)
        x = self.fc(x)
        return x

def densenet():
    # A small configuration for Fashion-MNIST: 4 layers per block instead of the paper's deeper variants.
    return DenseNet(num_init_features=64, growth_rate=32, block_layers=[4, 4, 4, 4], compression_rate=0.5, drop_rate=0.5)
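
# Illustrative sketch (not part of the original file): the same class can be configured with the
# block layout of the paper's 121-layer variant, [6, 12, 24, 16]. The name densenet121 is chosen
# for this example; the classifier head above is still hard-coded to 10 classes.
def densenet121():
    return DenseNet(num_init_features=64, growth_rate=32, block_layers=[6, 12, 24, 16],
                    compression_rate=0.5, drop_rate=0.5)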

mynet = densenet()

# Load Fashion-MNIST, add a channel dimension, and scale pixels to [0, 1].
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.fashion_mnist.load_data()
x_train = x_train.reshape((60000, 28, 28, 1)).astype('float32') / 255
x_test = x_test.reshape((10000, 28, 28, 1)).astype('float32') / 255

# Integer labels with a softmax output, so sparse categorical cross-entropy is used.
mynet.compile(loss='sparse_categorical_crossentropy',
              optimizer=tf.keras.optimizers.Adam(),
              metrics=['accuracy'])
history = mynet.fit(x_train, y_train,
                    batch_size=64,
                    epochs=5,
                    validation_split=0.2)
test_scores = mynet.evaluate(x_test, y_test, verbose=2)
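
# Illustrative sketch (not part of the original file): report the held-out metrics, run a few
# sample predictions, and save the trained weights. The checkpoint path is only an example name.
print("test loss, test accuracy:", test_scores)
predictions = mynet.predict(x_test[:5])
print("predicted classes:", predictions.argmax(axis=-1), "true labels:", y_test[:5])
mynet.save_weights("densenet_fashion_mnist_ckpt")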

TensorLayer 3.0 is a deep learning library that supports multiple deep learning frameworks as computational backends, with planned compatibility for TensorFlow, PyTorch, MindSpore, and Paddle.