Skip to content

Instantly share code, notes, and snippets.

@Shaddyjr
Last active August 17, 2019 02:34
Show Gist options
  • Save Shaddyjr/83a968d15961eb7b052fb2ce81c6aac0 to your computer and use it in GitHub Desktop.
# Hyperparameters shared by the model definition and training setup below.
kernel_size = 3            # base convolution kernel size (early stages add to it)
dropout = 0.5              # dropout rate for dense layers; conv stages use half of it
activation_func = "relu"   # activation for all hidden layers
input_shape = (54, 50, 3)  # input images: 54x50 pixels, 3 channels
conv__filters_1, conv__filters_2, conv__filters_3 = 32, 48, 64  # filters per conv stage
density_units_1, density_units_2 = 256, 64                      # units in the two dense layers
epochs = 50                # number of training epochs
# CNN for binary image classification: three convolutional stages (each
# Conv-Conv-MaxPool-Dropout), then two fully connected layers, ending in a
# single sigmoid output unit.
stage1_kernel = kernel_size + 4  # 7 given kernel_size = 3
stage2_kernel = kernel_size + 2  # 5 given kernel_size = 3
conv_dropout = dropout / 2       # conv stages use half the dense dropout rate

layers = []

# Conv stage 1: two convolutions with the largest kernel and fewest filters.
layers.append(
    Conv2D(
        filters=conv__filters_1,
        kernel_size=stage1_kernel,
        activation=activation_func,
        input_shape=input_shape,  # only the first layer declares the input shape
        padding="same",
    )
)
layers.append(
    Conv2D(
        filters=conv__filters_1,
        kernel_size=stage1_kernel,
        activation=activation_func,
        padding="same",
    )
)
layers.append(MaxPooling2D(pool_size=(2, 2)))
layers.append(Dropout(conv_dropout))

# Conv stage 2: medium kernel, more filters.
layers.append(
    Conv2D(
        filters=conv__filters_2,
        kernel_size=stage2_kernel,
        activation=activation_func,
        padding="same",
    )
)
layers.append(
    Conv2D(
        filters=conv__filters_2,
        kernel_size=stage2_kernel,
        activation=activation_func,
        padding="same",
    )
)
layers.append(MaxPooling2D(pool_size=(2, 2)))
layers.append(Dropout(conv_dropout))

# Conv stage 3: base kernel size, most filters.
layers.append(
    Conv2D(
        filters=conv__filters_3,
        kernel_size=kernel_size,
        activation=activation_func,
        padding="same",
    )
)
layers.append(
    Conv2D(
        filters=conv__filters_3,
        kernel_size=kernel_size,
        activation=activation_func,
        padding="same",
    )
)
layers.append(MaxPooling2D(pool_size=(2, 2)))
layers.append(Dropout(conv_dropout))

# Classifier head: flatten, two dense layers with dropout, sigmoid output.
layers.append(Flatten())
layers.append(Dense(density_units_1, activation=activation_func))
layers.append(Dropout(dropout))
layers.append(Dense(density_units_2, activation=activation_func))
layers.append(Dropout(dropout))
layers.append(Dense(1, activation='sigmoid'))

model = Sequential(layers)
# Training configuration for binary classification: cross-entropy loss,
# RMSprop with a small learning rate and per-update decay, and plain
# binary accuracy as the tracked metric.
# NOTE(review): `lr` and `decay` are legacy Keras optimizer argument names;
# recent Keras releases spell the learning rate `learning_rate` and removed
# `decay` — confirm against the installed Keras version before upgrading.
optimizer = RMSprop(lr=0.0001, decay=1e-6)
model.compile(
    loss='binary_crossentropy',
    optimizer=optimizer,
    metrics=['binary_accuracy'],
)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment