deeplearning.ai week 2 code snippets
import numpy as np

# Activation functions
sigmoid = lambda x: 1 / (1 + np.exp(-x))
# tanh is just np.tanh
relu = lambda x: np.maximum(0, x)
leaky_relu = lambda x: np.maximum(0.001*x, x)
def sigmoid_derivative(x):
    s = sigmoid(x)
    return s * (1 - s)
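
# Quick sanity check (mine, not from the original gist); the inputs are
# arbitrary, chosen just to exercise the functions above:
x_check = np.array([-1.0, 0.0, 1.0])
print(sigmoid(x_check))             # [0.26894142 0.5        0.73105858]
print(relu(x_check))                # [0. 0. 1.]
print(leaky_relu(x_check))          # [-0.001  0.     1.   ]
print(sigmoid_derivative(x_check))  # peaks at 0.25 when x == 0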
# reshaping an image -> vector
def image2vector(image):
    """
    Argument:
    image -- a numpy array of shape (length, height, depth)

    Returns:
    v -- a vector of shape (length*height*depth, 1)
    """
    return image.reshape(image.shape[0] * image.shape[1] * image.shape[2], 1)
# for a set of images
images2vector = lambda image_set: image_set.reshape(image_set.shape[0], -1).T
# Keep in mind that you can unroll to RGBRGBRGB or RRRGGGBBB.
# It doesn't matter, as long as you're consistent throughout.
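
# Example (mine): a batch of 10 RGB images of 64x64, assuming the common
# (m, height, width, channels) layout, flattens to one column per image.
batch = np.random.rand(10, 64, 64, 3)
print(images2vector(batch).shape)  # (12288, 10), since 64*64*3 = 12288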
# gradient descent converges faster after normalization
normalizeRows = lambda x: x / np.linalg.norm(x, axis=1, keepdims=True)
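
# Example (mine): each row is scaled to unit L2 norm.
x_rows = np.array([[0.0, 3.0, 4.0],
                   [1.0, 6.0, 4.0]])
print(normalizeRows(x_rows))  # first row -> [0. 0.6 0.8]; every row norm is now 1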
# You can think of softmax as a normalizing function used when your
# algorithm needs to classify two or more classes.
def softmax(x):
    # Shift each row by its max before exponentiating so large inputs
    # don't overflow; softmax is invariant to a constant shift per row.
    x_exp = np.exp(x - np.max(x, axis=1, keepdims=True))
    return x_exp / np.sum(x_exp, axis=1, keepdims=True)
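
# Example (values in the style of the course exercise): softmax is applied
# row-wise, so each row of the result sums to 1.
x_sm = np.array([[9.0, 2.0, 5.0, 0.0, 0.0],
                 [7.0, 5.0, 0.0, 0.0, 0.0]])
print(np.sum(softmax(x_sm), axis=1))  # [1. 1.]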
# L1 loss is used to evaluate the performance of your model.
# The bigger your loss is, the more different your predictions (yhat)
# are from the true values (y). In deep learning, you use optimization
# algorithms like gradient descent to train your model and to minimize the cost.
L1 = lambda yhat, y: np.sum(np.abs(y - yhat))
# L2 loss (np.dot of a 1-D error vector with itself is already the sum of squares)
L2 = lambda yhat, y: np.dot(y - yhat, y - yhat)
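
# Example (values as in the course exercise):
yhat = np.array([0.9, 0.2, 0.1, 0.4, 0.9])
y = np.array([1.0, 0.0, 0.0, 1.0, 1.0])
print(L1(yhat, y))  # ~1.1
print(L2(yhat, y))  # ~0.43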