Skip to content

Instantly share code, notes, and snippets.

@dipanjanS
Created September 20, 2019 07:54
Show Gist options
  • Save dipanjanS/cbe7a8a8330cfe8762d5c95ec857ccfe to your computer and use it in GitHub Desktop.
# Shape of one input sample: 28x28 grayscale image with a single channel
# (matches the (None, 28, 28, 1) input seen in the summary output below).
INPUT_SHAPE = (28, 28, 1)
def create_cnn_architecture_model1(input_shape):
    """Build and compile a small CNN classifier with a 10-way softmax head.

    Architecture: two Conv2D + MaxPooling2D stages (16 then 32 filters,
    3x3 kernels, 'same' padding), followed by Flatten, a 256-unit ReLU
    dense layer with 30% dropout, and a 10-unit softmax output.

    Args:
        input_shape: shape of a single input sample, e.g. (28, 28, 1).

    Returns:
        A compiled ``keras.Model`` using the Adam optimizer,
        ``sparse_categorical_crossentropy`` loss, and accuracy metric.
    """
    inputs = keras.layers.Input(shape=input_shape)

    # Feature extraction: two conv/pool stages; 'same' padding keeps
    # spatial size through the convs, each pool halves it.
    x = keras.layers.Conv2D(16, (3, 3), strides=(1, 1),
                            padding='same', activation='relu')(inputs)
    x = keras.layers.MaxPooling2D((2, 2))(x)
    x = keras.layers.Conv2D(32, (3, 3), strides=(1, 1),
                            padding='same', activation='relu')(x)
    x = keras.layers.MaxPooling2D((2, 2))(x)

    # Classification head.
    x = keras.layers.Flatten()(x)
    x = keras.layers.Dense(256, activation='relu')(x)
    x = keras.layers.Dropout(0.3)(x)
    outputs = keras.layers.Dense(10, activation='softmax')(x)

    model = keras.Model(inputs=inputs, outputs=outputs)
    # Sparse labels (integer class ids) rather than one-hot vectors.
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model
# Build the model for 28x28x1 inputs and print its layer-by-layer summary
# (the transcript of that summary is reproduced below).
model = create_cnn_architecture_model1(input_shape=INPUT_SHAPE)
model.summary()
# Output
Model: "model"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 28, 28, 1)] 0
_________________________________________________________________
conv2d (Conv2D) (None, 28, 28, 16) 160
_________________________________________________________________
...
...
dropout (Dropout) (None, 256) 0
_________________________________________________________________
dense_1 (Dense) (None, 10) 2570
=================================================================
Total params: 409,034
Trainable params: 409,034
Non-trainable params: 0
_________________________________________________________________
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment