@mehdidc
Created July 12, 2016 16:01
from nolearn.lasagne import NeuralNet, BatchIterator
from lasagne import layers, nonlinearities, updates, init, objectives
import numpy as np


class EarlyStopping(object):
    """Stop training when the validation criterion has not improved
    for `patience` epochs, then restore the best weights seen so far."""

    def __init__(self, patience=100, criterion='valid_loss',
                 criterion_smaller_is_better=True):
        self.patience = patience
        if criterion_smaller_is_better is True:
            self.best_valid = np.inf
        else:
            self.best_valid = -np.inf
        self.best_valid_epoch = 0
        self.best_weights = None
        self.criterion = criterion
        self.criterion_smaller_is_better = criterion_smaller_is_better

    def __call__(self, nn, train_history):
        current_valid = train_history[-1][self.criterion]
        current_epoch = train_history[-1]['epoch']
        # Track the best criterion value and the weights that produced it.
        if self.criterion_smaller_is_better:
            cond = current_valid < self.best_valid
        else:
            cond = current_valid > self.best_valid
        if cond:
            self.best_valid = current_valid
            self.best_valid_epoch = current_epoch
            self.best_weights = nn.get_all_params_values()
        elif self.best_valid_epoch + self.patience < current_epoch:
            # No improvement for `patience` epochs: restore best weights and stop.
            if nn.verbose:
                print("Early stopping.")
                print("Best {:s} was {:.6f} at epoch {}.".format(
                    self.criterion, self.best_valid, self.best_valid_epoch))
            nn.load_weights_from(self.best_weights)
            if nn.verbose:
                print("Weights set.")
            raise StopIteration()

    def load_best_weights(self, nn, train_history):
        nn.load_weights_from(self.best_weights)
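
# Note (not part of the original gist): `load_best_weights` matches the
# (nn, train_history) signature that nolearn uses for handler callbacks,
# so the same EarlyStopping instance could plausibly be passed to
# `on_training_finished` as well, restoring the best weights even when
# training reaches max_epochs before the patience runs out. See the
# usage sketch after the net definition below.
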
net = NeuralNet(
    # Define the architecture here
    layers=[
        ('input', layers.InputLayer),
        ('hidden1', layers.DenseLayer),
        ('dropout1', layers.DropoutLayer),
        ('hidden2', layers.DenseLayer),
        ('dropout2', layers.DropoutLayer),
        ('hidden3', layers.DenseLayer),
        ('output', layers.DenseLayer),
    ],
    # Layer parameters:
    input_shape=(None, 100),  # number of input features
    hidden1_num_units=1500,  # number of units in 1st hidden layer
    hidden1_nonlinearity=nonlinearities.rectify,
    hidden1_W=init.GlorotUniform(gain='relu'),
    dropout1_p=0.5,
    hidden2_num_units=1500,  # number of units in 2nd hidden layer
    hidden2_nonlinearity=nonlinearities.rectify,
    hidden2_W=init.GlorotUniform(gain='relu'),
    dropout2_p=0.5,
    hidden3_num_units=100,  # number of units in 3rd hidden layer
    hidden3_nonlinearity=nonlinearities.rectify,
    hidden3_W=init.GlorotUniform(gain='relu'),
    output_num_units=3,  # number of output classes
    output_W=init.GlorotUniform(),
    output_nonlinearity=nonlinearities.softmax,
    # Optimization method:
    update=updates.adadelta,  # the optimization algorithm is Adadelta
    update_learning_rate=0.1,
    batch_iterator_train=BatchIterator(batch_size=100),  # mini-batch size
    use_label_encoder=True,  # converts labels of any kind to integers
    max_epochs=100,  # train for at most this many epochs
    verbose=1,  # monitor training at each epoch
    # Handlers:
    on_epoch_finished=[EarlyStopping(patience=20, criterion='valid_accuracy',
                                     criterion_smaller_is_better=False)],
)
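
# --- Usage sketch (not part of the original gist). Assuming nolearn's
# scikit-learn-style fit API, this trains the net above on random
# placeholder data matching the configured shapes: 100 float32 input
# features and 3 integer class labels. nolearn's default train split
# holds out a validation set, which is what `valid_accuracy` is
# computed on, so the EarlyStopping handler fires during fit().
if __name__ == '__main__':
    X = np.random.randn(1000, 100).astype(np.float32)
    y = np.random.randint(0, 3, size=1000).astype(np.int32)
    net.fit(X, y)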