import tensorflow as tf
from tensorflow.keras.layers import (Input, Conv2D, BatchNormalization,
                                     Activation, AveragePooling2D, Flatten,
                                     Dense)
from tensorflow.keras.models import Model


def resnet_layer(inputs,
                 num_filters=16,
                 kernel_size=3,
                 strides=1,
                 activation='relu',
                 batch_normalization=True,
                 conv_first=True):
    """2D Convolution-Batch Normalization-Activation stack builder

    # Arguments
        inputs (tensor): input tensor from input image or previous layer
        num_filters (int): Conv2D number of filters
        kernel_size (int): Conv2D square kernel dimensions
        strides (int): Conv2D square stride dimensions
        activation (string): activation name
        batch_normalization (bool): whether to include batch normalization
        conv_first (bool): conv-bn-activation (True) or
            bn-activation-conv (False)

    # Returns
        x (tensor): tensor as input to the next layer
    """
    conv = Conv2D(num_filters,
                  kernel_size=kernel_size,
                  strides=strides,
                  padding='same',
                  kernel_initializer='he_normal',
                  kernel_regularizer=tf.keras.regularizers.l2(1e-4))

    x = inputs
    if conv_first:
        x = conv(x)
        if batch_normalization:
            x = BatchNormalization()(x)
        if activation is not None:
            x = Activation(activation)(x)
    else:
        if batch_normalization:
            x = BatchNormalization()(x)
        if activation is not None:
            x = Activation(activation)(x)
        x = conv(x)
    return x
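

# Minimal sanity-check sketch for resnet_layer (not part of the original gist).
# It builds one conv-BN-ReLU stack on a CIFAR-10-sized input and prints the
# resulting symbolic shape. The helper name _check_resnet_layer is hypothetical
# and only for illustration; it is never called automatically.
def _check_resnet_layer():
    probe = Input(shape=(32, 32, 3))
    out = resnet_layer(inputs=probe, num_filters=16, kernel_size=3, strides=1)
    # With padding='same' and strides=1 the spatial dims are preserved,
    # so the expected shape is (None, 32, 32, 16).
    print('resnet_layer output shape:', out.shape)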
def resnet_v2(input_shape, depth=20, num_classes=10):
    """ResNet Version 2 Model builder [b]

    Stacks of (1 x 1)-(3 x 3)-(1 x 1) BN-ReLU-Conv2D, also known as
    bottleneck layers.
    The first shortcut connection per stage is a 1 x 1 Conv2D; the second
    and onwards shortcut connections are identity.
    At the beginning of each stage, the feature map size is halved
    (downsampled) by a convolutional layer with strides=2, while the number
    of filter maps is doubled. Within each stage, the layers have the same
    number of filters and the same feature map sizes.

    Feature map sizes:
        conv1  : 32x32,  16
        stage 0: 32x32,  64
        stage 1: 16x16, 128
        stage 2:  8x8,  256

    # Arguments
        input_shape (tensor): shape of input image tensor
        depth (int): number of core convolutional layers
        num_classes (int): number of classes (CIFAR10 has 10)

    # Returns
        model (Model): Keras model instance
    """
    if (depth - 2) % 9 != 0:
        raise ValueError('depth should be 9n+2 (eg 56 or 110 in [b])')

    # Start model definition.
    num_filters_in = 16
    num_res_blocks = int((depth - 2) / 9)

    inputs = Input(shape=input_shape)
    # v2 performs Conv2D with BN-ReLU on input before splitting into 2 paths
    x = resnet_layer(inputs=inputs,
                     num_filters=num_filters_in,
                     conv_first=True)

    # Instantiate the stack of residual units
    for stage in range(3):
        for res_block in range(num_res_blocks):
            activation = 'relu'
            batch_normalization = True
            strides = 1
            if stage == 0:
                num_filters_out = num_filters_in * 4
                if res_block == 0:  # first layer and first stage
                    activation = None
                    batch_normalization = False
            else:
                num_filters_out = num_filters_in * 2
                if res_block == 0:  # first layer but not first stage
                    strides = 2     # downsample

            # bottleneck residual unit
            y = resnet_layer(inputs=x,
                             num_filters=num_filters_in,
                             kernel_size=1,
                             strides=strides,
                             activation=activation,
                             batch_normalization=batch_normalization,
                             conv_first=False)
            y = resnet_layer(inputs=y,
                             num_filters=num_filters_in,
                             conv_first=False)
            y = resnet_layer(inputs=y,
                             num_filters=num_filters_out,
                             kernel_size=1,
                             conv_first=False)
            if res_block == 0:
                # linear projection residual shortcut connection to match
                # changed dims
                x = resnet_layer(inputs=x,
                                 num_filters=num_filters_out,
                                 kernel_size=1,
                                 strides=strides,
                                 activation=None,
                                 batch_normalization=False)
            x = tf.keras.layers.add([x, y])
        num_filters_in = num_filters_out

    # Add classifier on top.
    # v2 has BN-ReLU before Pooling
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = AveragePooling2D(pool_size=8)(x)
    y = Flatten()(x)
    outputs = Dense(num_classes,
                    activation='softmax',
                    kernel_initializer='he_normal')(y)

    # Instantiate model.
    # Note: `hparams.learning_rate` is expected to be defined by the
    # surrounding script (e.g. a hyperparameter object).
    model = Model(inputs=inputs, outputs=outputs)
    model.compile(loss='sparse_categorical_crossentropy',
                  optimizer=tf.keras.optimizers.Adam(lr=hparams.learning_rate),
                  metrics=['accuracy'])
    return model
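

# ----------------------------------------------------------------------------
# Usage sketch (not part of the original gist): builds the ResNet-20 v2 model
# for CIFAR-10-sized inputs and runs a single forward pass on random data.
# The `hparams` object referenced in resnet_v2 comes from the surrounding
# script, so this sketch substitutes a simple namespace with a made-up
# learning rate purely for illustration.
# ----------------------------------------------------------------------------
if __name__ == '__main__':
    import numpy as np
    from types import SimpleNamespace

    # Hypothetical stand-in for the hparams object used inside resnet_v2.
    hparams = SimpleNamespace(learning_rate=1e-3)

    model = resnet_v2(input_shape=(32, 32, 3), depth=20, num_classes=10)
    model.summary()

    # Forward pass on a random batch: output has shape (4, 10).
    dummy_images = np.random.rand(4, 32, 32, 3).astype('float32')
    print(model.predict(dummy_images).shape)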