import numpy as np

def update(params_values, grads_values, nn_architecture, learning_rate):
    # plain gradient descent: move every parameter against its gradient;
    # layers are numbered from 1, so enumerate has to start at 1 as well
    for layer_idx, layer in enumerate(nn_architecture, 1):
        params_values["W" + str(layer_idx)] -= learning_rate * grads_values["dW" + str(layer_idx)]
        params_values["b" + str(layer_idx)] -= learning_rate * grads_values["db" + str(layer_idx)]
    return params_values
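None of these snippets shows the loop that drives them; a minimal training-loop sketch, assuming only the functions defined in this post (init_layers, full_forward_propagation, get_cost_value, get_accuracy_value, full_backward_propagation, update), could look like this:

def train(X, Y, nn_architecture, epochs, learning_rate):
    # sketch: wire the helpers below into plain batch gradient descent
    params_values = init_layers(nn_architecture)
    cost_history, accuracy_history = [], []
    for i in range(epochs):
        Y_hat, memory = full_forward_propagation(X, params_values, nn_architecture)
        cost_history.append(get_cost_value(Y_hat, Y))
        accuracy_history.append(get_accuracy_value(Y_hat, Y))
        grads_values = full_backward_propagation(Y_hat, Y, memory, params_values, nn_architecture)
        params_values = update(params_values, grads_values, nn_architecture, learning_rate)
    return params_values, cost_history, accuracy_history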
def full_backward_propagation(Y_hat, Y, memory, params_values, nn_architecture):
    grads_values = {}
    m = Y.shape[1]
    Y = Y.reshape(Y_hat.shape)
    # gradient of binary cross-entropy with respect to the network output
    dA_prev = - (np.divide(Y, Y_hat) - np.divide(1 - Y, 1 - Y_hat))
    # walk the layers back to front, reusing values cached by the forward pass
    for layer_idx_prev, layer in reversed(list(enumerate(nn_architecture))):
        layer_idx_curr = layer_idx_prev + 1
        activ_function_curr = layer["activation"]
        A_prev, Z_curr = memory["A" + str(layer_idx_prev)], memory["Z" + str(layer_idx_curr)]
        W_curr, b_curr = params_values["W" + str(layer_idx_curr)], params_values["b" + str(layer_idx_curr)]
        dA_prev, dW_curr, db_curr = single_layer_backward_propagation(dA_prev, W_curr, b_curr, Z_curr, A_prev, activ_function_curr)
        grads_values["dW" + str(layer_idx_curr)] = dW_curr
        grads_values["db" + str(layer_idx_curr)] = db_curr
    return grads_values
def single_layer_backward_propagation(dA_curr, W_curr, b_curr, Z_curr, A_prev, activation="relu"):
    m = A_prev.shape[1]
    # use == for string comparison; `is` checks object identity and is unreliable here
    if activation == "relu":
        backward_activation_func = relu_backward
    elif activation == "sigmoid":
        backward_activation_func = sigmoid_backward
    else:
        raise Exception('Non-supported activation function')
    dZ_curr = backward_activation_func(dA_curr, Z_curr)
    dW_curr = np.dot(dZ_curr, A_prev.T) / m               # gradient w.r.t. weights
    db_curr = np.sum(dZ_curr, axis=1, keepdims=True) / m  # gradient w.r.t. bias
    dA_prev = np.dot(W_curr.T, dZ_curr)                   # gradient passed to the previous layer
    return dA_prev, dW_curr, db_curr
def get_cost_value(Y_hat, Y):
    # binary cross-entropy averaged over the m examples (columns)
    m = Y_hat.shape[1]
    cost = -1 / m * (np.dot(Y, np.log(Y_hat).T) + np.dot(1 - Y, np.log(1 - Y_hat).T))
    return np.squeeze(cost)

def get_accuracy_value(Y_hat, Y):
    # threshold the probabilities into hard 0/1 labels, then compare column-wise
    Y_hat_ = convert_prob_into_class(Y_hat)
    return (Y_hat_ == Y).all(axis=0).mean()
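convert_prob_into_class is used above but never defined in these snippets; a minimal sketch, assuming the usual 0.5 decision threshold:

def convert_prob_into_class(probs):
    # map each probability to a hard class label at the 0.5 threshold
    probs_ = np.copy(probs)
    probs_[probs_ > 0.5] = 1
    probs_[probs_ <= 0.5] = 0
    return probs_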
def full_forward_propagation(X, params_values, nn_architecture):
    memory = {}  # caches the A and Z values needed later by backpropagation
    A_curr = X
    for idx, layer in enumerate(nn_architecture):
        layer_idx = idx + 1
        A_prev = A_curr
        activ_function_curr = layer["activation"]
        W_curr = params_values["W" + str(layer_idx)]
        b_curr = params_values["b" + str(layer_idx)]
        A_curr, Z_curr = single_layer_forward_propagation(A_prev, W_curr, b_curr, activ_function_curr)
        memory["A" + str(idx)] = A_prev
        memory["Z" + str(layer_idx)] = Z_curr
    return A_curr, memory
def single_layer_forward_propagation(A_prev, W_curr, b_curr, activation="relu"):
    # affine step Z = W·A_prev + b, followed by the layer's non-linearity
    Z_curr = np.dot(W_curr, A_prev) + b_curr
    if activation == "relu":
        activation_func = relu
    elif activation == "sigmoid":
        activation_func = sigmoid
    else:
        raise Exception('Non-supported activation function')
    # return both the activation and the pre-activation Z, cached for backprop
    return activation_func(Z_curr), Z_curr
def sigmoid(Z):
    return 1 / (1 + np.exp(-Z))

def relu(Z):
    return np.maximum(0, Z)

def sigmoid_backward(dA, Z):
    # derivative of the sigmoid, sig * (1 - sig), chained with the incoming gradient
    sig = sigmoid(Z)
    return dA * sig * (1 - sig)
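single_layer_backward_propagation also expects a relu_backward function that never appears in these snippets; a minimal sketch of the standard ReLU derivative (the gradient passes through where Z > 0 and is zeroed elsewhere):

def relu_backward(dA, Z):
    # pass the gradient through only where the ReLU was active (Z > 0)
    dZ = np.array(dA, copy=True)
    dZ[Z <= 0] = 0
    return dZ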
nn_architecture = [
    {"input_dim": 2, "output_dim": 4, "activation": "relu"},
    {"input_dim": 4, "output_dim": 6, "activation": "relu"},
    {"input_dim": 6, "output_dim": 6, "activation": "relu"},
    {"input_dim": 6, "output_dim": 4, "activation": "relu"},
    {"input_dim": 4, "output_dim": 1, "activation": "sigmoid"},
]
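Each dictionary describes one fully connected layer, and consecutive entries must chain: every output_dim equals the next layer's input_dim. Passing this list to init_layers (defined below) yields weight matrices shaped (output_dim, input_dim), for example:

params = init_layers(nn_architecture)
print(params["W1"].shape)  # (4, 2): layer 1 maps 2 inputs to 4 hidden units
print(params["W5"].shape)  # (1, 4): layer 5 maps 4 units to the single sigmoid output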
def init_layers(nn_architecture, seed=99):
    np.random.seed(seed)
    params_values = {}
    for idx, layer in enumerate(nn_architecture):
        layer_idx = idx + 1
        layer_input_size = layer["input_dim"]
        layer_output_size = layer["output_dim"]
        # small random values break the symmetry between units
        params_values["W" + str(layer_idx)] = np.random.randn(layer_output_size, layer_input_size) * 0.1
        params_values["b" + str(layer_idx)] = np.random.randn(layer_output_size, 1) * 0.1
    return params_values
from tensorflow import keras  # assumption: tf.keras; adjust for standalone Keras

# Adding a callback function that will run at the end of each epoch;
# save_model_prediction_graph is defined elsewhere in the post
testmodelcb = keras.callbacks.LambdaCallback(on_epoch_end=save_model_prediction_graph)

# Compilation of the model; binary cross-entropy matches the NumPy version above
model.compile(loss='binary_crossentropy', optimizer='adamax', metrics=['accuracy'])

# Model training; verbose=0 keeps Keras quiet, the callback reports instead
model.fit(X_train, y_train, epochs=N_EPOCHS, verbose=0, callbacks=[testmodelcb])
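keras.callbacks.LambdaCallback invokes on_epoch_end with (epoch, logs), so save_model_prediction_graph has to accept those two arguments; a hypothetical skeleton (the real callback in the post presumably saves a prediction plot instead):

def save_model_prediction_graph(epoch, logs):
    # logs holds the compiled metrics; the key is "accuracy" in tf.keras,
    # "acc" in some older Keras versions
    print("epoch %d: loss=%.4f accuracy=%.4f" % (epoch, logs["loss"], logs["accuracy"]))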