import numpy as np


# Define helper functions that will be used in L-model forward prop
def linear_forward(A_prev, W, b):
    """Compute the affine transformation Z = W . A_prev + b for one layer."""
    Z = np.dot(W, A_prev) + b
    # Cache the inputs; they are needed later for back-propagation
    cache = (A_prev, W, b)

    return Z, cache
def linear_activation_forward(A_prev, W, b, activation_fn):
    """Compute the layer's output A = g(Z) for the chosen activation g."""
    assert activation_fn == "sigmoid" or activation_fn == "tanh" or \
        activation_fn == "relu"

    if activation_fn == "sigmoid":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = sigmoid(Z)

    elif activation_fn == "tanh":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = tanh(Z)

    elif activation_fn == "relu":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = relu(Z)

    assert A.shape == (W.shape[0], A_prev.shape[1])

    # Bundle both caches so backward prop can recover Z and the linear inputs
    cache = (linear_cache, activation_cache)

    return A, cache
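

# The sigmoid, tanh, and relu helpers used above are defined elsewhere in the
# original gist series; a minimal sketch of what they are assumed to look like
# (each returns the activation together with Z as its cache) is given below.
def sigmoid(Z):
    A = 1 / (1 + np.exp(-Z))
    return A, Z


def tanh(Z):
    A = np.tanh(Z)
    return A, Z


def relu(Z):
    A = np.maximum(0, Z)
    return A, Z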
def L_model_forward(X, parameters, hidden_layers_activation_fn="relu"):
    """Run forward propagation through all L layers of the network."""
    A = X
    caches = []
    # parameters stores one W and one b per layer, so the number of layers
    # is half the number of entries
    L = len(parameters) // 2

    # Hidden layers 1..L-1 use the chosen hidden-layer activation
    for l in range(1, L):
        A_prev = A
        A, cache = linear_activation_forward(
            A_prev, parameters["W" + str(l)], parameters["b" + str(l)],
            activation_fn=hidden_layers_activation_fn)
        caches.append(cache)

    # Output layer L uses sigmoid for binary classification
    AL, cache = linear_activation_forward(
        A, parameters["W" + str(L)], parameters["b" + str(L)],
        activation_fn="sigmoid")
    caches.append(cache)

    assert AL.shape == (1, X.shape[1])

    return AL, caches
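

# A minimal usage sketch: the layer sizes and random data below are made up,
# and the parameter-initialization loop stands in for the initialization
# helper defined elsewhere in the original gist series.
if __name__ == "__main__":
    np.random.seed(1)
    layers_dims = [4, 5, 3, 1]  # input layer, two hidden layers, output layer

    parameters = {}
    for l in range(1, len(layers_dims)):
        parameters["W" + str(l)] = np.random.randn(
            layers_dims[l], layers_dims[l - 1]) * 0.01
        parameters["b" + str(l)] = np.zeros((layers_dims[l], 1))

    X = np.random.randn(4, 10)  # 10 examples with 4 features each
    AL, caches = L_model_forward(X, parameters,
                                 hidden_layers_activation_fn="relu")
    print(AL.shape)  # (1, 10): one prediction per example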