Skip to content

Instantly share code, notes, and snippets.

@akimach
Created September 23, 2017 04:50
Show Gist options
  • Select an option

  • Save akimach/1e07699d2bf1b4601a886b2dd80bcd82 to your computer and use it in GitHub Desktop.

Select an option

Save akimach/1e07699d2bf1b4601a886b2dd80bcd82 to your computer and use it in GitHub Desktop.
怠け者のためのディープラーニング入門 - ハイパーパラメータのチューニング ref: http://qiita.com/akimach/items/a10154e0f0e6dcaafdd0
# Reproducibility: seed NumPy's global RNG *before* importing Keras, so any
# randomness consumed during Keras/backend initialization (and later weight
# init that draws from NumPy) is deterministic across runs.
import numpy as np
np.random.seed(1234)
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras import optimizers
# Scikit-learn wrapper lets GridSearchCV treat a Keras model as an estimator.
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.utils import shuffle
from sklearn.model_selection import GridSearchCV
# Hyperparameter search space for GridSearchCV; each key corresponds to a
# keyword argument of the mlp() model-building function below.
param_grid = {
    "activation": ["relu", "tanh", "sigmoid"],
    "learning_rate": [1e-4, 1e-3, 1e-2],
    "units": [64, 128, 256],
}
def mlp(activation="relu", learning_rate=0.001, units=128):
    """Build and compile a two-hidden-layer MLP for binary classification.

    Args:
        activation: activation function for both hidden layers.
        learning_rate: SGD learning rate.
        units: width of each hidden layer.

    Returns:
        A compiled Keras Sequential model expecting 2-D inputs.
    """
    layers = [
        Dense(units=units, input_dim=2),
        Activation(activation),
        Dense(units=units),
        Activation(activation),
        Dense(units=1),
        # Sigmoid output pairs with the binary cross-entropy loss below.
        Activation('sigmoid'),
    ]
    network = Sequential(layers)
    network.compile(
        loss='binary_crossentropy',
        optimizer=optimizers.SGD(lr=learning_rate),
        metrics=['accuracy'],
    )
    return network
# Balanced synthetic dataset: n_class points sampled around (+1, +1) with
# label 1, and n_class points around (-1, -1) with label 0, shuffled together
# so cross-validation folds stay class-balanced.
n_data = 200
n_class = n_data // 2
X_positive = np.random.normal(loc=1.0, scale=1.0, size=(n_class, 2))
X_negative = np.random.normal(loc=-1.0, scale=1.0, size=(n_class, 2))
X = np.concatenate([X_positive, X_negative], axis=0)
y = np.concatenate([np.ones(n_class, dtype=int), np.zeros(n_class, dtype=int)])
X, y = shuffle(X, y)
# Exhaustive grid search with 4-fold cross-validation over param_grid.
# FIX: Keras 1's `nb_epoch` was renamed `epochs` in Keras 2; the scikit-learn
# wrapper silently drops the unknown old kwarg, so training would fall back to
# the default epoch count. Use `epochs` so every candidate trains 50 epochs.
model = KerasClassifier(build_fn=mlp, epochs=50, batch_size=20, verbose=0)
clf = GridSearchCV(estimator=model, param_grid=param_grid, cv=4, scoring='accuracy')
res = clf.fit(X, y)
# best_score_ is the mean cross-validated accuracy of the best combination.
print("Accuracy:", res.best_score_)
print("Hyper Parameters:", res.best_params_)
Accuracy: 0.915
Hyper Parameters: {'activation': 'relu', 'learning_rate': 0.01, 'units': 64}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment