Last active
May 12, 2018 09:01
-
-
Save mikkokotila/8d21297205dc9de24be25a82c1eb7505 to your computer and use it in GitHub Desktop.
An example model for Keras hyperparameter optimization with Talos.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# The model-building function Talos calls once per permutation of the scan.
def breast_cancer_model(x_train, y_train, x_val, y_val, params):
    """Build, compile and fit a Keras model for a Talos hyperparameter scan.

    Talos injects the data splits and one hyperparameter permutation per
    call, and requires that the function returns ``(history, model)``.

    Parameters
    ----------
    x_train, y_train : array-like
        Training features and labels (``x_train`` must expose ``.shape[1]``).
    x_val, y_val : array-like
        Validation features and labels.
    params : dict
        Hyperparameters for this permutation. Keys used here:
        ``'activation'``, ``'dropout'``, ``'last_activation'``,
        ``'losses'``, ``'optimizer'``, ``'lr'``, ``'batch_size'``,
        ``'epochs'`` (plus whatever ``hidden_layers`` reads).

    Returns
    -------
    tuple
        ``(history, model)`` — the Keras ``History`` object and the fitted
        model, in that order, as Talos expects.
    """
    # Build the network exactly as in plain Keras.
    model = Sequential()
    model.add(Dense(10, input_dim=x_train.shape[1],
                    activation=params['activation'],
                    kernel_initializer='normal'))
    model.add(Dropout(params['dropout']))

    # Talos helper: makes the number/shape of hidden layers itself a
    # searchable hyperparameter (driven by keys in `params`).
    hidden_layers(model, params, 1)

    # Output layer and compilation — standard Keras again.
    model.add(Dense(1, activation=params['last_activation'],
                    kernel_initializer='normal'))
    model.compile(loss=params['losses'],
                  # lr_normalizer is a Talos utility that rescales the raw
                  # 'lr' value into the range the chosen optimizer expects.
                  optimizer=params['optimizer'](
                      lr=lr_normalizer(params['lr'], params['optimizer'])),
                  metrics=['acc', fmeasure])

    history = model.fit(x_train, y_train,
                        validation_data=[x_val, y_val],
                        batch_size=params['batch_size'],
                        epochs=params['epochs'],
                        verbose=0)  # silent: Talos runs many permutations

    # Talos requires both the history object and the model to be returned.
    return history, model
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment