
@jaidevd
Created March 24, 2012 22:55
Basic Perceptron Learning for AND Gate
import numpy as np


class Perceptron:

    def __init__(self, weights, biases):
        self.weights = weights
        self.biases = biases

    def train(self, training, learning_rate):
        # Meant for a simple two-input AND gate, hence only the first two
        # columns of the training array are inputs; the third column holds
        # the bipolar targets. Easily extended to any linearly separable
        # dataset.
        targets = training[:, 2].reshape((-1, 1))
        while not np.array_equal(self.test(training[:, 0:2]), targets):
            for i in range(training.shape[0]):
                # Perceptron learning rule: update the weights and bias
                # only when the current sample is misclassified.
                if self.test(training[i, 0:2]).item() != training[i, 2]:
                    self.weights = self.weights + learning_rate * \
                        training[i, 2] * training[i, 0:2].reshape((2, 1))
                    self.biases = self.biases + learning_rate * training[i, 2]

    def test(self, testing):
        y_in = np.dot(testing, self.weights) + self.biases
        op = -np.ones(y_in.shape, dtype=int)
        op[y_in > 0] = 1  # Bipolar step activation
        return op
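
For reference, here is a minimal usage sketch on the bipolar AND-gate truth table. The column layout, the zero-initialised weights and bias, and the learning rate of 1.0 are assumptions for illustration, not part of the original gist.

# Bipolar AND truth table: columns are [x1, x2, target] (assumed layout).
training = np.array([[ 1,  1,  1],
                     [ 1, -1, -1],
                     [-1,  1, -1],
                     [-1, -1, -1]], dtype=float)

p = Perceptron(np.zeros((2, 1)), 0.0)  # assumed zero initialisation
p.train(training, learning_rate=1.0)
print(p.test(training[:, 0:2]).ravel())  # -> [ 1 -1 -1 -1]

With zero initial weights every sample is classified as -1, so the loop runs until the weight vector separates (1, 1) from the other three inputs; for this dataset it converges after a single pass.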