Created
June 20, 2019 11:48
-
-
Save RafayAK/09ea8b10d350edb853365721c573153b to your computer and use it in GitHub Desktop.
The main training loop for a 2-layer neural network
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Costs sampled once every 100 epochs, kept for later inspection/plotting.
costs = []

# Main training loop for the 2-layer network.
# Layer objects (presumably Z* = linear layers, A* = activation layers) and
# X_train / Y_train / compute_cost / number_of_epochs / learning_rate are
# defined earlier in the notebook — TODO confirm against the full source.
for epoch in range(number_of_epochs):
    # Forward pass: propagate the inputs through both layer pairs.
    Z1.forward(X_train)
    A1.forward(Z1.Z)
    Z2.forward(A1.A)
    A2.forward(Z2.Z)

    # Compute the loss on the predictions, along with the gradient of the
    # cost with respect to the output activations (dA2) to seed back-prop.
    cost, dA2 = compute_cost(Y=Y_train, Y_hat=A2.A)

    # Report and record the cost once every 100 epochs.
    if epoch % 100 == 0:
        print("Cost at epoch#{}: {}".format(epoch, cost))
        costs.append(cost)

    # Backward pass: push gradients from the output layer back to the first.
    A2.backward(dA2)
    Z2.backward(A2.dZ)
    A1.backward(Z2.dA_prev)
    Z1.backward(A1.dZ)

    # Gradient-descent update on both linear layers' parameters.
    Z2.update_params(learning_rate=learning_rate)
    Z1.update_params(learning_rate=learning_rate)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment