# Logistic_MIST.py -- multiclass (softmax) logistic regression trained with SGD
import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def sigmoid_deriv(x):
    # Derivative expressed in terms of the sigmoid output x = sigmoid(a).
    return x * (1.0 - x)

def softmax(x):
    # Subtract the max before exponentiating to avoid overflow;
    # softmax is invariant to this shift.
    temp = np.exp(x - np.max(x))
    return temp / np.sum(temp)

def corrupt(x, noise):
    # Masking noise: each element of x is zeroed with probability `noise`.
    return np.random.binomial(size=x.shape, n=1, p=1.0 - noise) * x
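# A quick illustration of corrupt(), which is defined here but not used by the
# Logistic class below: it applies masking noise by zeroing each input element
# with probability `noise`. For example, something like
#     corrupt(np.ones(10), noise=0.3)
# would return a vector of ones with roughly 30% of the entries set to 0.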
class Logistic:
    """A single softmax output layer, i.e. multiclass logistic regression."""

    def __init__(self, numInput, numOutput):
        self.act1 = sigmoid
        self.act1_deriv = sigmoid_deriv
        self.act2 = softmax
        # +1 for the bias unit prepended to every input vector.
        self.numInput = numInput + 1
        self.numOutput = numOutput
        self.weight_out = np.random.uniform(-1.0, 1.0, (self.numOutput, self.numInput))

    def fit1(self, X, t, learning_rate=0.1, epochs=100000):
        # Prepend a bias column of ones to the inputs.
        X = np.hstack([np.ones([X.shape[0], 1]), X])
        t = np.array(t)
        for k in range(epochs):
            # Stochastic gradient descent: one randomly chosen example per step.
            i = np.random.randint(X.shape[0])
            x = X[i]
            y = self.act2(np.dot(self.weight_out, x))
            # For softmax with cross-entropy loss, the gradient with respect to
            # the pre-softmax activations is simply (prediction - target).
            delta = y - t[i]
            z = np.atleast_2d(x)
            delta = np.atleast_2d(delta)
            self.weight_out -= learning_rate * np.dot(delta.T, z)
        return self.weight_out

    def predict(self, x):
        x = np.array(x)
        # Prepend the bias term, then apply the softmax output layer.
        x = np.insert(x, 0, 1)
        y = self.act2(np.dot(self.weight_out, x))
        return y
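A minimal sanity check, not part of the gist: assuming the class above is saved as Logistic_MIST.py, the sketch below fits it on a tiny hand-made two-class problem and prints the predicted class probabilities for one point. The toy data (X_toy, t_toy), the variable name clf, and the hyperparameters are made up for illustration only.

import numpy as np
from Logistic_MIST import Logistic

# Four 2-D points; class 0 if the first coordinate is 0, class 1 otherwise.
X_toy = np.array([[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]])
t_toy = np.array([[1, 0], [1, 0], [0, 1], [0, 1]])   # 1-of-K targets

clf = Logistic(2, 2)
clf.fit1(X_toy, t_toy, learning_rate=0.1, epochs=5000)
print(clf.predict([1.0, 0.0]))   # probabilities should favour class 1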
# Training script: fits the Logistic model on MNIST and reports test metrics.
# Note: fetch_mldata and sklearn.cross_validation are the scikit-learn APIs of
# the time this gist was written (2015); see the note after the script for the
# modern equivalents.
import numpy as np
from Logistic_MIST import Logistic
from sklearn.datasets import fetch_mldata
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import LabelBinarizer
from sklearn.metrics import confusion_matrix, classification_report

if __name__ == "__main__":
    mnist = fetch_mldata('MNIST original', data_home=".")

    # create the data matrix and labels
    X = mnist.data
    y = mnist.target

    # normalize pixel values to the range [0, 1]
    X = X.astype(np.float64)
    X /= X.max()

    # construct the softmax (multiclass logistic regression) classifier
    mlp = Logistic(28 * 28, 10)

    # split into training and test data
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1)

    # encode the teacher labels as 1-of-K vectors
    labels_train = LabelBinarizer().fit_transform(y_train)
    labels_test = LabelBinarizer().fit_transform(y_test)

    # learning
    mlp.fit1(X_train, labels_train, learning_rate=0.02, epochs=200000)

    # prediction
    predictions = []
    for i in range(X_test.shape[0]):
        o = mlp.predict(X_test[i])
        predictions.append(np.argmax(o))
    print(confusion_matrix(y_test, predictions))
    print(classification_report(y_test, predictions))
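fetch_mldata and the sklearn.cross_validation module have since been removed from scikit-learn. A sketch of the equivalent data loading and splitting on a recent scikit-learn, assuming fetch_openml and sklearn.model_selection (their documented replacements); the rest of the script can stay as above:

import numpy as np
from sklearn.datasets import fetch_openml
from sklearn.model_selection import train_test_split

# 'mnist_784' is the OpenML copy of the original MNIST data set.
mnist = fetch_openml('mnist_784', as_frame=False)
X = mnist.data.astype(np.float64) / 255.0
y = mnist.target.astype(int)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1)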