Kirthi Shankar Sivamani ksivaman

@ksivaman
ksivaman / one_layer_backward_pass.py
Created July 13, 2019 23:05
A complete backward pass for the backpropagation algorithm
import numpy as np

def backward_pass(y_pred, train_Y, activation_dict, output_dict, params_w, params_b, layers=[4, 5, 1], activate=['R', 'S']):
    gradients = {}
    num_samples = train_Y.shape[0]
    train_Y = train_Y.reshape(y_pred.shape)
    # derivative of the binary cross-entropy loss w.r.t. the predictions
    d_prev_act = -(np.divide(train_Y, y_pred) - np.divide(1 - train_Y, 1 - y_pred))
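    # hypothetical continuation (the key names 'a0', 'z1', 'w1', 'b1', ... and the
    # return order of one_layer_backward_pass are assumptions; the gist preview is
    # truncated here): walk the layers back-to-front, accumulating gradients
    for index in reversed(range(len(layers) - 1)):
        layer_num = index + 1
        prev_act = activation_dict['a' + str(index)]      # activations feeding this layer
        curr_out = output_dict['z' + str(layer_num)]      # this layer's pre-activation
        d_prev_act, d_weight, d_bias = one_layer_backward_pass(
            d_prev_act, params_w['w' + str(layer_num)], params_b['b' + str(layer_num)],
            curr_out, prev_act, activation=activate[index])
        gradients['dw' + str(layer_num)] = d_weight
        gradients['db' + str(layer_num)] = d_bias
    return gradients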
@ksivaman
ksivaman / one_layer_backward_pass.py
Created July 13, 2019 22:27
One-layer backward pass for feedforward neural networks.
import numpy as np
import activations   # the activation/derivative helpers gist, assumed saved as activations.py

def one_layer_backward_pass(curr_grad, curr_weight, curr_bias, curr_out, prev_act, activation='R'):
    # how many samples are in the previous activations?
    num = prev_act.shape[1]
    # find out which activation function we are differentiating
    if activation == 'R':
        d_act_func = activations.d_relu
    elif activation == 'S':
        d_act_func = activations.d_sigmoid
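    # hypothetical completion (the return order is an assumption; the gist
    # preview is truncated here)
    d_curr_out = d_act_func(curr_grad, curr_out)                     # dL/dZ for this layer
    d_curr_weight = np.dot(d_curr_out, prev_act.T) / num             # dL/dW, averaged over the batch
    d_curr_bias = np.sum(d_curr_out, axis=1, keepdims=True) / num    # dL/db
    d_prev_act = np.dot(curr_weight.T, d_curr_out)                   # gradient passed to the previous layer
    return d_prev_act, d_curr_weight, d_curr_bias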
@ksivaman
ksivaman / activations_and_derivatives.py
Created July 13, 2019 21:23
A collection of activation functions and their derivatives.
import numpy as np

# sigmoid activation
def sigmoid(input):
    return 1 / (1 + np.exp(-input))

# relu activation
def relu(input):
    return np.maximum(input, 0)

# derivative of the sigmoid w.r.t. its input
def d_sigmoid(d_init, out):
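    # a hypothetical body (the gist preview is truncated here): d_init is the
    # upstream gradient and out is the layer's pre-activation, matching the
    # call d_act_func(curr_grad, curr_out) in one_layer_backward_pass
    sig = sigmoid(out)
    return d_init * sig * (1 - sig)

# derivative of the relu w.r.t. its input (also a sketch under the same assumptions)
def d_relu(d_init, out):
    d = np.array(d_init, copy=True)
    d[out <= 0] = 0
    return d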
@ksivaman
ksivaman / param_init.py
Created July 13, 2019 21:11
Initialize network parameters
import numpy as np

def init(layers=[4, 5, 1]):
    np.random.seed(42)
    params_w = {}
    params_b = {}
    for index in range(len(layers) - 1):
        layer_num = index + 1
        in_layer_size = layers[index]
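        # hypothetical continuation (the key format 'w1', 'b1', ... and the 0.1
        # weight scaling are assumptions; the gist preview is truncated here)
        out_layer_size = layers[layer_num]
        params_w['w' + str(layer_num)] = np.random.randn(out_layer_size, in_layer_size) * 0.1
        params_b['b' + str(layer_num)] = np.zeros((out_layer_size, 1))
    return params_w, params_b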
@ksivaman
ksivaman / forward.py
Created July 13, 2019 21:02
Forward pass for all the layers.
def forward_pass(train_X, params_w, params_b, layers=[4, 5, 1], activate=['R', 'S']):
    num_layers = len(layers) - 1
    activation_dict = {}
    output_dict = {}
    curr_act = train_X
    for index in range(num_layers):
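        # hypothetical loop body (the key names 'a0', 'z1', 'w1', ... and the return
        # signature of one_layer_forward_pass are assumptions; the gist preview is
        # truncated here)
        layer_num = index + 1
        prev_act = curr_act
        curr_act, curr_out = one_layer_forward_pass(
            prev_act, params_w['w' + str(layer_num)], params_b['b' + str(layer_num)],
            activation=activate[index])
        activation_dict['a' + str(index)] = prev_act
        output_dict['z' + str(layer_num)] = curr_out
    return curr_act, activation_dict, output_dict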
@ksivaman
ksivaman / one_layer_forward_pass.py
Created July 13, 2019 20:31
Forward pass for one layer in feedforward neural networks.
import numpy as np
import activations   # the activation/derivative helpers gist, assumed saved as activations.py

def one_layer_forward_pass(input_activations, weights, bias, activation='R'):
    output = np.dot(weights, input_activations) + bias
    if activation == 'R':
        activation_next = activations.relu(output)
    elif activation == 'S':
        activation_next = activations.sigmoid(output)
    else:
        raise Exception('Nahh!')
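    # hypothetical return (an assumption; the preview cuts off here): both the
    # activation and the pre-activation are handed back so the backward pass
    # can reuse them
    return activation_next, output

# Illustrative usage (the shapes are made up): a 5-unit layer fed 4 input
# features for a batch of 3 samples; columns are samples, as in the other gists
if __name__ == '__main__':
    W = np.random.randn(5, 4) * 0.1
    b = np.zeros((5, 1))
    X = np.random.randn(4, 3)
    A, Z = one_layer_forward_pass(X, W, b, activation='R')
    print(A.shape)   # (5, 3)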