A simple neural network in Python that learns the AND gate
import numpy as np

def sigmoid(x, derivative=False):
    # When derivative=True, x is assumed to already be a sigmoid output,
    # so sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)) = x * (1 - x).
    return x*(1-x) if derivative else 1/(1+np.exp(-x))

class NeuralNetwork:
    def __init__(self, x, y):
        # Architecture: input layer -> 4 hidden units -> 1 output unit.
        self.input = x
        self.weights1 = np.random.rand(self.input.shape[1], 4)
        self.weights2 = np.random.rand(4, 1)
        self.y = y
        self.output = np.zeros(self.y.shape)

    def feedforward(self):
        self.layer1 = sigmoid(np.dot(self.input, self.weights1))
        self.output = sigmoid(np.dot(self.layer1, self.weights2))

    def backprop(self):
        # Apply the chain rule to find the derivative of the sum-of-squares
        # loss, sum((y - output)**2), with respect to weights2 and weights1.
        d_weights2 = np.dot(self.layer1.T,
                            2*(self.y - self.output) * sigmoid(self.output, derivative=True))
        d_weights1 = np.dot(self.input.T,
                            np.dot(2*(self.y - self.output) * sigmoid(self.output, derivative=True),
                                   self.weights2.T) * sigmoid(self.layer1, derivative=True))

        # Update the weights with the derivative (slope) of the loss function.
        self.weights1 += d_weights1
        self.weights2 += d_weights2

if __name__ == "__main__":
    # Truth table for the AND gate: the output is 1 only when both inputs are 1.
    X = np.array([[0, 0],
                  [0, 1],
                  [1, 0],
                  [1, 1]])
    y = np.array([[0], [0], [0], [1]])
    nn = NeuralNetwork(X, y)

    for i in range(1500):
        nn.feedforward()
        nn.backprop()

    print(nn.output)
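    # Not part of the original gist: a hypothetical sanity check. If training
    # has converged, rounding the network's outputs should reproduce the AND
    # truth table targets in y.
    print(np.round(nn.output))  # should approach [[0.], [0.], [0.], [1.]]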