@ShigekiKarita · Created February 25, 2016

A minimal MNIST MLP training loop in Chainer (1.x API).

from __future__ import print_function
import numpy as np
import chainer
import chainer.links as L
import chainer.optimizers as O
import data
import net
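
# NB: data and net are the helper modules shipped with the Chainer 1.x
# MNIST example (chainer/examples/mnist): data.load_mnist_data() returns a
# dict holding all 70,000 flattened 28x28 images ('data') and their labels
# ('target'); net.MnistMLP is a three-layer perceptron (sketched at the end
# of this file).
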
def compute(optimizer, inp, out, volatile, batchsize):
    """Run one pass over (inp, out) in minibatches.

    Trains when volatile == "off"; with volatile == "on" Chainer skips
    building the backward graph, so the pass is evaluation-only.
    """
    total = inp.shape[0]
    sum_accuracy = 0
    sum_loss = 0
    model = optimizer.target
    perm = np.random.permutation(total)
    for i in range(0, total, batchsize):
        x = chainer.Variable(np.asarray(inp[perm[i:i + batchsize]]), volatile=volatile)
        t = chainer.Variable(np.asarray(out[perm[i:i + batchsize]]), volatile=volatile)
        loss = model(x, t)
        # accumulate batch-size-weighted mean loss and accuracy
        sum_loss += float(model.loss.data) * len(t.data) / total
        sum_accuracy += float(model.accuracy.data) * len(t.data) / total
        if volatile == "off":
            model.zerograds()
            loss.backward()
            optimizer.update()
    return 'mean loss={}, accuracy={}'.format(sum_loss, sum_accuracy)
if __name__ == "__main__":
    # prepare NN: a 784-1000-10 MLP wrapped in a softmax cross-entropy
    # classifier and trained with Adam
    optimizer = O.Adam()
    optimizer.setup(L.Classifier(net.MnistMLP(784, 1000, 10)))
    batchsize = 100

    # prepare dataset: scale pixels to [0, 1], cast labels to int32, and
    # use the standard 60,000/10,000 train/test split
    mnist = data.load_mnist_data()
    mnist['data'] = mnist['data'].astype(np.float32) / 255
    mnist['target'] = mnist['target'].astype(np.int32)
    train_size = 60000
    x_train, x_test = np.split(mnist['data'], [train_size])
    y_train, y_test = np.split(mnist['target'], [train_size])
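    # NOTE (assumption): on Chainer >= 1.9, the example's data helper can
    # be replaced by the built-in loader, roughly:
    #     train, test = chainer.datasets.get_mnist()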
    # train for 20 epochs, reporting metrics on the train and test splits
    for epoch in range(20):
        print('epoch', epoch)
        print("train\t", compute(optimizer, x_train, y_train, "off", batchsize))
        print("test\t", compute(optimizer, x_test, y_test, "on", batchsize))