Skip to content

Instantly share code, notes, and snippets.

@A03ki
Last active August 23, 2024 17:30
Show Gist options
  • Select an option

  • Save A03ki/27ec0cec5aed24c335dcd463350b5041 to your computer and use it in GitHub Desktop.

Select an option

Save A03ki/27ec0cec5aed24c335dcd463350b5041 to your computer and use it in GitHub Desktop.
Implementation of DenseNet with Keras (TensorFlow)

DenseNet

References

Paper

GitHub

Documentation

Requirements

Python 3.5-3.7
TensorFlow 2.x

Examples

DenseNet

bottleneck = False and compression = 1.0

import tensorflow.keras.layers as L
from tensorflow.keras.models import Model

from densenet import DenseNet


densenet = DenseNet([1, 2, 3], 12)

x = L.Input((32, 32, 3))
y = densenet(x, bottleneck=False, compression=1.0, dataset=None)
y = L.Dense(10, activation="softmax")(y)

model = Model(inputs=x, outputs=y)
model.summary()

DenseNet-B

bottleneck = True

import tensorflow.keras.layers as L
from tensorflow.keras.models import Model

from densenet import DenseNet


densenet = DenseNet([1, 2, 3], 12)

x = L.Input((32, 32, 3))
y = densenet(x, bottleneck=True, compression=1.0, dataset=None)
y = L.Dense(10, activation="softmax")(y)

model = Model(inputs=x, outputs=y)
model.summary()

DenseNet-C

0 < compression < 1

import tensorflow.keras.layers as L
from tensorflow.keras.models import Model

from densenet import DenseNet


densenet = DenseNet([1, 2, 3], 12)

x = L.Input((32, 32, 3))
y = densenet(x, bottleneck=False, compression=0.5, dataset=None)
y = L.Dense(10, activation="softmax")(y)

model = Model(inputs=x, outputs=y)
model.summary()

DenseNet-BC

bottleneck = True and 0 < compression < 1

import tensorflow.keras.layers as L
from tensorflow.keras.models import Model

from densenet import DenseNet


densenet = DenseNet([1, 2, 3], 12)

x = L.Input((32, 32, 3))
y = densenet(x, bottleneck=True, compression=0.5, dataset=None)
y = L.Dense(10, activation="softmax")(y)

model = Model(inputs=x, outputs=y)
model.summary()

ImageNet

DenseNet121

import tensorflow.keras.layers as L
from tensorflow.keras.models import Model

from densenet import DenseNet


densenet = DenseNet([6, 12, 24, 16], 32, use_bias=False)

x = L.Input((224, 224, 3))
y = densenet(x, dataset="imagenet")
y = L.Dense(1000, activation="softmax")(y)

model = Model(inputs=x, outputs=y)
model.summary()

This model is equivalent to DenseNet121 from the Keras applications module.

from tensorflow.keras.applications.densenet import DenseNet121


dense121 = DenseNet121(include_top=True, weights="imagenet",
                       input_tensor=None, input_shape=None,
                       pooling=None, classes=1000)

dense121.save_weights("weights.h5")
model.load_weights("weights.h5")

DenseNet169

import tensorflow.keras.layers as L
from tensorflow.keras.models import Model

from densenet import DenseNet


densenet = DenseNet([6, 12, 32, 32], 32, use_bias=False)

x = L.Input((224, 224, 3))
y = densenet(x, dataset="imagenet")
y = L.Dense(1000, activation="softmax")(y)

model = Model(inputs=x, outputs=y)
model.summary()

This model is equivalent to DenseNet169 from the Keras applications module.

from tensorflow.keras.applications.densenet import DenseNet169


densenet169 = DenseNet169(include_top=True, weights="imagenet",
                          input_tensor=None, input_shape=None,
                          pooling=None, classes=1000)

densenet169.save_weights("weights.h5")
model.load_weights("weights.h5")

DenseNet201

import tensorflow.keras.layers as L
from tensorflow.keras.models import Model

from densenet import DenseNet


densenet = DenseNet([6, 12, 48, 32], 32, use_bias=False)

x = L.Input((224, 224, 3))
y = densenet(x, dataset="imagenet")
y = L.Dense(1000, activation="softmax")(y)

model = Model(inputs=x, outputs=y)
model.summary()

This model is equivalent to DenseNet201 from the Keras applications module.

from tensorflow.keras.applications.densenet import DenseNet201


densenet201 = DenseNet201(include_top=True, weights="imagenet",
                          input_tensor=None, input_shape=None,
                          pooling=None, classes=1000)

densenet201.save_weights("weights.h5")
model.load_weights("weights.h5")

License

The MIT License.

import tensorflow.keras.layers as L
class DenseNet:
    """Builder for DenseNet architectures (Huang et al.) on Keras layers.

    An instance is configured with the block layout and growth rate, then
    called on an input tensor to assemble dense blocks, transition layers
    and a final BN-ReLU-global-average-pool, returning the pooled features.
    """

    def __init__(self, blocks, growth_rate, dropout_rate=0.0, use_bias=True):
        # blocks: number of convolution blocks inside each dense block.
        # growth_rate: channels added per convolution block (k in the paper).
        # dropout_rate: applied after every conv in bn_relu_conv2d when > 0.
        # use_bias: forwarded to every Conv2D layer.
        self.blocks = blocks
        self.growth_rate = growth_rate
        self.dropout_rate = dropout_rate
        self.use_bias = use_bias

    def __call__(self, x, bottleneck=True, compression=0.5,
                 dataset="imagenet"):
        """Build the network graph on `x` and return the pooled feature tensor."""
        # DenseNet-BC variants and the ImageNet stem start with 2k channels;
        # the plain CIFAR-style network starts with 16.
        if dataset == "imagenet" or (bottleneck and compression != 1.0):
            first_channels = 2 * self.growth_rate
        else:
            first_channels = 16
        x = self.first_conv2d(x, first_channels, dataset)
        for index, n_blocks in enumerate(self.blocks):
            if index > 0:
                # Every pair of consecutive dense blocks is separated by
                # a transition layer that compresses and downsamples.
                x = self.transition_layer(x, compression=compression)
            x = self.dense_block(x, n_blocks, bottleneck=bottleneck)
        x = L.Activation("relu")(L.BatchNormalization()(x))
        return L.GlobalAveragePooling2D()(x)

    def first_conv2d(self, x, channels, dataset):
        """Initial convolution; the ImageNet stem adds BN-ReLU and max pooling."""
        size = (7, 7) if dataset == "imagenet" else (3, 3)
        x = self._conv2d(x, channels, size)
        if dataset != "imagenet":
            return x
        x = L.BatchNormalization()(x)
        x = L.Activation("relu")(x)
        # NOTE(review): the reference ImageNet stem uses a stride-2 7x7 conv;
        # this conv runs at the Conv2D default stride of 1 — confirm intended.
        return L.MaxPooling2D((3, 3), strides=2, padding="same")(x)

    def convolution_block(self, x, bottleneck=True):
        """One composite layer: optional 1x1 bottleneck, then a 3x3 conv."""
        if bottleneck:
            x = self.bn_relu_conv2d(x, 4 * self.growth_rate, (1, 1))
        return self.bn_relu_conv2d(x, self.growth_rate, (3, 3))

    def dense_block(self, x, n_blocks, bottleneck=True):
        """Stack `n_blocks` densely connected convolution blocks."""
        for _ in range(n_blocks):
            x = self._dense_block(x, bottleneck=bottleneck)
        return x

    def _dense_block(self, x, bottleneck=True):
        # Dense connectivity: concatenate the new feature maps onto the input.
        new_features = self.convolution_block(x, bottleneck=bottleneck)
        return L.Concatenate()([x, new_features])

    def transition_layer(self, x, compression=0.5):
        """Compress channels by `compression` and halve the spatial size."""
        squeezed = int(x.shape[-1] * compression)
        x = self.bn_relu_conv2d(x, squeezed, (1, 1))
        return L.AveragePooling2D((2, 2))(x)

    def bn_relu_conv2d(self, x, output_channels, kernel):
        """Pre-activation unit: BatchNorm -> ReLU -> Conv2D (+ dropout)."""
        x = L.BatchNormalization()(x)
        x = L.Activation("relu")(x)
        return self._conv2d(x, output_channels, kernel,
                            dropout_rate=self.dropout_rate)

    def _conv2d(self, x, output_channels, kernel, padding="same",
                dropout_rate=0.0):
        # Convolution, followed by dropout only when a positive rate is given.
        x = L.Conv2D(output_channels, kernel, padding=padding,
                     use_bias=self.use_bias)(x)
        if dropout_rate:
            x = L.Dropout(dropout_rate)(x)
        return x
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment