Skip to content

Instantly share code, notes, and snippets.

View angadsinghsandhu's full-sized avatar
🎯
Focusing

Angad Sandhu angadsinghsandhu

🎯
Focusing
View GitHub Profile
@angadsinghsandhu
angadsinghsandhu / initialize_NN.py
Created December 1, 2020 04:30
Neural Network From Scratch
# imports
import numpy as np
# A small feed-forward neural network implemented from scratch with NumPy.
class NeuralNetwork:
    # Store the training data and hyper-parameters.
    # x: training inputs; y: target values;
    # learning_rate: gradient-descent step size (default 0.06);
    # num_layers: number of weight layers in the network (default 2).
    def __init__(self, x, y, learning_rate=0.06, num_layers=2):
        # input array (initialization continues beyond this view — TODO confirm remainder)
@angadsinghsandhu
angadsinghsandhu / sigmoid.py
Created December 1, 2020 04:39
the sigmoid squashing function
# logistic activation: squashes any real input into the open interval (0, 1)
def sigmoid(self, x):
    """Return 1 / (1 + e^(-x)), the sigmoid of x."""
    exp_neg = np.exp(-x)
    return 1 / (1 + exp_neg)
@angadsinghsandhu
angadsinghsandhu / train.py
Created December 1, 2020 05:11
function that executes forward prop, backprop and loss calculation
# training neural net: one pass over every sample in self.input
def train(self):
    # iterate over each training sample by index
    for i in range(len(self.input)):
        # load sample i as a column vector into the input layer z0
        self.z0 = self.input[i].reshape([-1, 1])
        # forward step (loop body continues beyond this view — presumably
        # backprop and loss accumulation follow; verify against the full gist)
        output = self.forwardprop()
@angadsinghsandhu
angadsinghsandhu / d_sigmoid.py
Created December 1, 2020 05:14
Derivative of the sigmoid squashing function
# derivative of the sigmoid activation: s(z) * (1 - s(z))
def d_sigmoid(self, z):
    """Return the gradient of the sigmoid evaluated at z."""
    s = self.sigmoid(z)
    return s * (1 - s)
@angadsinghsandhu
angadsinghsandhu / feedforward.py
Created December 1, 2020 05:17
Forward Propagation
# Forward Propagation Logic
def forwardprop(self):
    # first layer: z1 = w1 · z0 + b1
    # NOTE(review): exec on a constant string is unnecessary — the plain
    # assignment `self.z1 = np.dot(self.w1, self.z0) + self.b1` behaves the
    # same; kept byte-for-byte here.
    exec("self.z1 = np.dot( self.w1, self.z0 ) + self.b1")
    # activation of the first layer: a1 = sigmoid(z1)
    exec("self.a1 = self.sigmoid(self.z1)")
    # remaining layers 2..num_layers (loop body continues beyond this view)
    for i in range(2, self.num_layers + 1):
@angadsinghsandhu
angadsinghsandhu / backprop.py
Last active December 1, 2020 16:03
Backward Propagation
# Backward Propagation Logic
def backprop(self, y_hat, y):
    # chain rule: derivative of the squared-error loss with respect to the
    # last layer's pre-activation z
    j = self.num_layers
    # build the assignment for the last dz dynamically from the layer index
    # NOTE(review): format() receives three arguments for two placeholders —
    # the extra is silently ignored; also, exec on a template string is
    # avoidable. Confirm intent before refactoring.
    cmd = "self.dz{} = 2 * (y_hat - y) * self.d_sigmoid(self.z{})".format(j, j, j)
    exec(cmd)
@angadsinghsandhu
angadsinghsandhu / nn_loss.py
Created December 1, 2020 05:22
Calculating Loss
# Calculating Loss
def NN_loss(self, y_hat, y):
    """Accumulate the squared error between prediction and target into self.loss."""
    squared_error = (y_hat - y) ** 2
    # take the scalar out of the 1-element array before accumulating
    self.loss += squared_error[0]
@angadsinghsandhu
angadsinghsandhu / resetloss.py
Created December 1, 2020 05:23
Resetting Loss
# reset loss after each iteration
def resetloss(self):
    """Zero the accumulated loss so the next iteration starts fresh."""
    self.loss = 0
@angadsinghsandhu
angadsinghsandhu / predict.py
Last active December 2, 2020 08:37
predicting input values
# Predicting input values
def predict(self, num):
    """Feed `num` through the network and return the first output value."""
    # load the sample into the input layer
    self.z0 = num
    # run a forward pass; the prediction is the first element of the result
    outputs = self.forwardprop()
    return outputs[0]
from Framework.ClassificationFramework import NeuralNetwork
from Framework.predict import predict_classification as predict
from Framework.normalize import normalize
from Data_Creation import odd_even_data
# Driver: load data, then train/evaluate (body continues beyond this view).
def run():
    # load the odd/even classification dataset
    x_train, y_train = odd_even_data.data()
    # normalizing data (continues beyond this view — TODO confirm remainder)