Neural Network to train XOR function
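The script below implements a small fully connected network from scratch with NumPy: layers are trained with plain stochastic gradient descent (one randomly chosen sample per step), the bias is handled by appending a column of ones to the input, and the activation function (sigmoid, tanh, or relu) is selectable per network.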
import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def sigmoid_prime(x):
    return sigmoid(x) * (1.0 - sigmoid(x))

def tanh(x):
    return np.tanh(x)

def tanh_prime(x):
    return 1.0 - np.tanh(x) ** 2
def relu(x):
    return np.maximum(x, 0)

def relu_prime(x):
    # Return a fresh 0/1 mask instead of mutating the caller's array in place.
    return (x > 0).astype(x.dtype)
class Layer:
    def __init__(self, dim, id, act, act_prime, isoutputLayer=False):
        # Weights initialized uniformly in [-1, 1).
        self.weight = 2 * np.random.random(dim) - 1
        self.delta = None
        self.A = None
        self.activation = act
        self.activation_prime = act_prime
        self.isoutputLayer = isoutputLayer
        self.id = id

    def forward(self, x):
        z = np.dot(x, self.weight)
        self.A = self.activation(z)         # cache activation for the update step
        self.dZ = self.activation_prime(z)  # cache derivative for backprop
        return self.A
    def backward(self, y, rightLayer):
        # For the output layer, y is the target; for hidden layers it is
        # ignored and the delta is pulled from the layer to the right.
        if self.isoutputLayer:
            error = self.A - y
            self.delta = np.atleast_2d(error * self.dZ)
        else:
            self.delta = np.atleast_2d(
                rightLayer.delta.dot(rightLayer.weight.T) * self.dZ)
        return self.delta
    def update(self, learning_rate, left_a):
        a = np.atleast_2d(left_a)
        d = np.atleast_2d(self.delta)
        ad = a.T.dot(d)
        self.weight -= learning_rate * ad
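
# Backpropagation recap for the two methods above (f = activation):
#   output layer:  delta = (A - y) * f'(z)
#   hidden layer:  delta = (delta_right @ W_right^T) * f'(z)
#   update:        W -= learning_rate * (a_left^T @ delta)
# where a_left is the input that was fed to the layer in the forward pass.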
class NeuralNetwork:
    def __init__(self, layersDim, activation='tanh'):
        if activation == 'sigmoid':
            self.activation = sigmoid
            self.activation_prime = sigmoid_prime
        elif activation == 'tanh':
            self.activation = tanh
            self.activation_prime = tanh_prime
        elif activation == 'relu':
            self.activation = relu
            self.activation_prime = relu_prime

        # The "+ 1" widens each layer by one unit to make room for the bias
        # column appended to the input in fit(); hidden layers learn that
        # extra unit's activation rather than clamping it to 1.
        self.layers = []
        for i in range(1, len(layersDim) - 1):
            dim = (layersDim[i - 1] + 1, layersDim[i] + 1)
            self.layers.append(Layer(dim, i, self.activation, self.activation_prime))
        dim = (layersDim[i] + 1, layersDim[i + 1])
        self.layers.append(Layer(dim, len(layersDim) - 1, self.activation,
                                 self.activation_prime, True))
    def fit(self, X, y, learning_rate=0.1, epochs=10000):
        # Add a column of ones to X: the bias unit for the input layer.
        ones = np.atleast_2d(np.ones(X.shape[0]))
        X = np.concatenate((ones.T, X), axis=1)
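        # e.g. the XOR inputs [[0, 0], [0, 1], [1, 0], [1, 1]]
        # become [[1, 0, 0], [1, 0, 1], [1, 1, 0], [1, 1, 1]].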

        for k in range(epochs):
            if k % 10000 == 0:
                print('epochs:', k)

            # Pick one training sample at random (stochastic gradient descent).
            i = np.random.randint(X.shape[0])
            a = X[i]
            # compute the feed forward
            for l in range(len(self.layers)):
                a = self.layers[l].forward(a)

            # compute the backward propagation
            delta = self.layers[-1].backward(y[i], None)
            for l in range(len(self.layers) - 2, -1, -1):
                delta = self.layers[l].backward(delta, self.layers[l + 1])

            # update weights
            a = X[i]
            for layer in self.layers:
                layer.update(learning_rate, a)
                a = layer.A
    def predict(self, x):
        # Prepend the bias input, then run a plain forward pass.
        a = np.concatenate((np.ones(1), np.array(x)), axis=0)
        for l in range(len(self.layers)):
            a = self.layers[l].forward(a)
        return a
if __name__ == '__main__':
    X = np.array([[0, 0],
                  [0, 1],
                  [1, 0],
                  [1, 1]])
    y = np.array([0, 1, 1, 0])

    # tanh
    nn = NeuralNetwork([2, 3, 4, 1], activation='tanh')
    nn.fit(X, y, learning_rate=0.1, epochs=10000)
    print("\n\nResult with tanh")
    for e in X:
        print(e, nn.predict(e))

    # sigmoid
    nn = NeuralNetwork([2, 3, 4, 1], activation='sigmoid')
    nn.fit(X, y, learning_rate=0.3, epochs=20000)
    print("\n\nResult with sigmoid")
    for e in X:
        print(e, nn.predict(e))

    # relu
    nn = NeuralNetwork([2, 3, 4, 1], activation='relu')
    nn.fit(X, y, learning_rate=0.1, epochs=50000)
    print("\n\nResult with relu")
    for e in X:
        print(e, nn.predict(e))
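
To read the raw outputs as XOR labels rather than eyeballing them, one can threshold at 0.5. A minimal verification sketch, assuming the classes above are already defined in the same session (the 0.5 cutoff and the accuracy tally are additions for illustration, not part of the original script):

X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([0, 1, 1, 0])
nn = NeuralNetwork([2, 3, 4, 1], activation='tanh')
nn.fit(X, y, learning_rate=0.1, epochs=10000)
correct = 0
for e, target in zip(X, y):
    raw = nn.predict(e)        # raw output of the final layer, shape (1,)
    label = int(raw[0] > 0.5)  # threshold to a 0/1 prediction
    correct += (label == target)
    print(e, '->', label, '(raw:', raw[0], ', target:', target, ')')
print('accuracy:', correct / len(X))

Since training is stochastic (random weight initialization and random sample order), an occasional run can converge poorly; rerunning or adjusting the learning rate usually recovers all four XOR cases.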