Convolutional network used to quantify and identify bacteria colony patches.
from keras.models import Sequential
from keras.layers.convolutional import Conv2D, MaxPooling2D, ZeroPadding2D
from keras.layers import Flatten, Dense, Dropout, Activation
from keras import backend as K
from keras.layers.normalization import BatchNormalization
from keras.regularizers import l2


def mycnn(img_shape=(32, 32, 1), n_classes=2, l2_reg=0.0, weights=None):
    """Build the bacteria-colony patch classifier: five convolutional blocks
    (Conv2D + BatchNormalization + ReLU, the last three with max pooling)
    followed by two fully connected blocks and a softmax output."""
    K.set_image_data_format("channels_last")
    # Initialize model
    model = Sequential()
    # Layer 1
    model.add(Conv2D(96, (3, 3), input_shape=img_shape, padding='same', kernel_regularizer=l2(l2_reg)))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    # Layer 2
    model.add(Conv2D(128, (3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    # Layer 3
    model.add(ZeroPadding2D((1, 1)))
    model.add(Conv2D(256, (3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    # Layer 4
    model.add(ZeroPadding2D((1, 1)))
    model.add(Conv2D(256, (3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    # Layer 5
    model.add(ZeroPadding2D((1, 1)))
    model.add(Conv2D(256, (3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    # Layer 6
    model.add(Flatten())
    model.add(Dense(256))  # 3072
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dropout(0.5))
    # Layer 7
    model.add(Dense(128))  # 4096 # 1024
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dropout(0.5))
    # Layer 8
    model.add(Dense(n_classes))
    model.add(BatchNormalization())
    model.add(Activation('softmax'))
    # Optionally load pretrained weights from the given file path
    if weights is not None:
        model.load_weights(weights)
    return model
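For reference, a minimal usage sketch (not part of the original gist), assuming Keras 2.x with a TensorFlow backend, 32x32 single-channel patches, and hypothetical placeholder arrays standing in for real colony-patch data:

import numpy as np
from keras.utils import to_categorical

# Build and compile the two-class patch classifier defined above.
model = mycnn(img_shape=(32, 32, 1), n_classes=2, l2_reg=1e-4)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model.summary()

# Illustrative random data in place of real bacteria colony patches.
x_train = np.random.rand(16, 32, 32, 1).astype('float32')
y_train = to_categorical(np.random.randint(0, 2, size=16), num_classes=2)
model.fit(x_train, y_train, batch_size=8, epochs=1)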