Skip to content

Instantly share code, notes, and snippets.

View ImadDabbura's full-sized avatar
🎯
Focusing

Imad Dabbura ImadDabbura

🎯
Focusing
View GitHub Profile
def sigmoid_gradient(dA, Z):
    """Backward pass through a sigmoid unit.

    Given dA (gradient of the cost w.r.t. the activation) and the cached
    pre-activation Z, returns dZ = dA * sigma(Z) * (1 - sigma(Z)).
    """
    A, _ = sigmoid(Z)
    return dA * A * (1 - A)
def tanh_gradient(dA, Z):
    """Backward pass through a tanh unit.

    Given dA (gradient of the cost w.r.t. the activation) and the cached
    pre-activation Z, returns dZ = dA * (1 - tanh(Z)^2).
    """
    A, Z = tanh(Z)
    dZ = dA * (1 - np.square(A))
    # Bug fix: the original fell off the end here and implicitly returned
    # None, which would crash (or silently corrupt) any backprop caller.
    return dZ
# Binary cross-entropy cost
def compute_cost(AL, y):
    """Return the mean binary cross-entropy between predictions and labels.

    AL : array of shape (1, m) — predicted probabilities in (0, 1).
    y  : array of shape (1, m) — ground-truth 0/1 labels.
    """
    m = y.shape[1]
    # Per-example log-likelihood terms, then average with a sign flip.
    losses = y * np.log(AL) + (1 - y) * np.log(1 - AL)
    cost = - (1 / m) * np.sum(losses)
    return cost
# Helper used by the L-model forward propagation
def linear_forward(A_prev, W, b):
    """Affine part of a layer's forward pass.

    Returns (Z, cache): Z = W . A_prev + b, and cache holds (A_prev, W, b)
    exactly as received so the backward pass can reuse them.
    """
    return np.dot(W, A_prev) + b, (A_prev, W, b)
def linear_activation_forward(A_prev, W, b, activation_fn):
    """Forward step through one layer: affine transform then activation.

    NOTE(review): only this input-validation guard is visible in this
    extract — the rest of the body (the linear_forward call and the
    activation) appears to have been lost when the gist was scraped;
    confirm against the original source.
    NOTE: `assert` is stripped under ``python -O``; raising ValueError
    for an unknown activation_fn would be more robust.
    """
    assert activation_fn == "sigmoid" or activation_fn == "tanh" or \
        activation_fn == "relu"
# Plot the 4 activation functions over a shared input grid
z = np.linspace(-10, 10, 100)
# Computes post-activation outputs; each helper returns (A, Z), so z is
# rebound to itself on every call.
A_sigmoid, z = sigmoid(z)
A_tanh, z = tanh(z)
# NOTE(review): relu and leaky_relu are not defined anywhere in this
# extract — verify they exist in the full source before running.
A_relu, z = relu(z)
A_leaky_relu, z = leaky_relu(z)
# Logistic (sigmoid) activation
def sigmoid(Z):
    """Elementwise sigmoid; returns (A, Z) so Z can be cached for backprop."""
    return 1 / (1 + np.exp(-Z)), Z
def tanh(Z):
    """Elementwise hyperbolic tangent; returns (A, Z) for backprop caching."""
    return np.tanh(Z), Z
def initialize_parameters(layers_dims):
    """Initialize weights and biases for an L-layer network.

    Parameters
    ----------
    layers_dims : list of int
        Layer sizes, input layer first.

    Returns
    -------
    dict
        "W1".."W{L-1}" with shape (layers_dims[l], layers_dims[l-1]),
        drawn from N(0, 1) and scaled by 0.01, and "b1".."b{L-1}" as
        zero column vectors.
    """
    np.random.seed(1)  # fixed seed keeps every run reproducible
    parameters = {}
    L = len(layers_dims)
    for l in range(1, L):
        parameters["W" + str(l)] = np.random.randn(
            layers_dims[l], layers_dims[l - 1]) * 0.01
        parameters["b" + str(l)] = np.zeros((layers_dims[l], 1))
    # Bug fix: the original had no return statement, so every caller
    # received None and the parameters dict was discarded.
    return parameters
# Circles dataset. NOTE(review): make_circles returns an (X, y) tuple, so
# X1 holds both coordinates and labels — downstream code presumably
# unpacks or indexes it; verify against the original source.
X1 = make_circles(factor=0.5, noise=0.05, n_samples=1500)
# Moons dataset (same (X, y) tuple shape)
X2 = make_moons(n_samples=1500, noise=0.05)
fig, ax = plt.subplots(1, 2)
# NOTE(review): loop-body indentation was lost when this snippet was
# scraped; the two statements below are assumed to form the loop body,
# and the SpectralClustering fit/plot steps appear cut off — confirm
# against the original source.
for i, X in enumerate([X1, X2]):
    fig.set_size_inches(18, 7)
    sp = SpectralClustering(n_clusters=2, affinity='nearest_neighbors')
# Circles dataset. NOTE(review): make_circles returns an (X, y) tuple, so
# X1 holds both coordinates and labels — verify how it is consumed.
X1 = make_circles(factor=0.5, noise=0.05, n_samples=1500)
# Moons dataset (same (X, y) tuple shape)
X2 = make_moons(n_samples=1500, noise=0.05)
fig, ax = plt.subplots(1, 2)
# NOTE(review): loop-body indentation was lost when this snippet was
# scraped; the statements below are assumed to form the loop body, and
# the KMeans fit/plot steps appear cut off — confirm against the
# original source.
for i, X in enumerate([X1, X2]):
    fig.set_size_inches(18, 7)
    km = KMeans(n_clusters=2)
# Create data from three different multivariate Gaussian distributions.
# NOTE(review): no np.random.seed call is visible before this block, so
# the sample (and the clustering) differs on every run.
X_1 = np.random.multivariate_normal(mean=[4, 0], cov=[[1, 0], [0, 1]], size=75)
X_2 = np.random.multivariate_normal(mean=[6, 6], cov=[[2, 0], [0, 2]], size=250)
# Deliberately small third cluster (20 points)
X_3 = np.random.multivariate_normal(mean=[1, 5], cov=[[1, 0], [0, 2]], size=20)
# Stack into one (345, 2) array
df = np.concatenate([X_1, X_2, X_3])
# Run k-means with the true number of clusters
km = KMeans(n_clusters=3)
km.fit(df)
labels = km.predict(df)
# Create horizontal data: two parallel rows of 20 points each, at y=2
# and y=4, spanning x in [1, 5].
X = np.tile(np.linspace(1, 5, 20), 2)
y = np.repeat(np.array([2, 4]), 20)
df = np.c_[X, y]  # shape (40, 2): column-stack coordinates
km = KMeans(n_clusters=2)
km.fit(df)
labels = km.predict(df)
# Fitted cluster centers, presumably plotted by later (unseen) code
centroids = km.cluster_centers_