Skip to content

Instantly share code, notes, and snippets.

@KentaKudo
Last active February 5, 2018 18:11
Show Gist options
  • Save KentaKudo/612787568a3efbfd274db71b66009395 to your computer and use it in GitHub Desktop.
def alex_net(shape):
    """Build and compile an AlexNet-style CNN for 10-way classification.

    The network stacks five 5x5 same-padded conv layers (96/256/384/384/256
    filters), each followed by batch normalization, with max-pooling and
    increasing dropout after the first two, then a 1000-unit dense layer and
    a 10-class softmax head. L2 weight decay (1e-4) regularizes every conv
    kernel. Compiled with categorical cross-entropy, Adam, and accuracy.

    Args:
        shape: Input tensor shape (excluding the batch axis), e.g. (32, 32, 3).

    Returns:
        A compiled ``keras.models.Model``.
    """
    from keras.models import Model
    from keras.layers import (
        BatchNormalization,
        Conv2D,
        Dense,
        Dropout,
        Flatten,
        Input,
        MaxPooling2D,
    )
    from keras.regularizers import l2

    weight_decay = 1e-4

    def conv_bn(filters, tensor):
        # 5x5 same-padded ReLU conv with L2 kernel decay, then batch norm.
        out = Conv2D(
            filters,
            (5, 5),
            padding='same',
            kernel_regularizer=l2(weight_decay),
            activation='relu',
        )(tensor)
        return BatchNormalization()(out)

    inputs = Input(shape=shape)

    # Two conv blocks, each downsampled and regularized by dropout.
    x = conv_bn(96, inputs)
    x = Dropout(0.3)(MaxPooling2D(pool_size=(2, 2))(x))
    x = conv_bn(256, x)
    x = Dropout(0.4)(MaxPooling2D(pool_size=(2, 2))(x))

    # Three consecutive conv blocks without pooling, then heavier dropout.
    x = conv_bn(384, x)
    x = conv_bn(384, x)
    x = conv_bn(256, x)
    x = Dropout(0.5)(x)

    # Classifier head.
    x = Flatten()(x)
    x = Dense(1000, activation='relu', kernel_initializer='he_normal')(x)
    outputs = Dense(10, activation='softmax')(x)

    model = Model(inputs=inputs, outputs=outputs)
    model.compile(
        loss='categorical_crossentropy',
        optimizer='adam',
        metrics=['accuracy']
    )
    return model
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment