Skip to content

Instantly share code, notes, and snippets.

@sourcepirate
Created May 15, 2017 12:12
Show Gist options
  • Save sourcepirate/00a12011aa84f51cbf279c36841581a1 to your computer and use it in GitHub Desktop.
Save sourcepirate/00a12011aa84f51cbf279c36841581a1 to your computer and use it in GitHub Desktop.
gradient descent algorithm
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
def hypothesis(x, theta):
    """Linear hypothesis: the (summed) dot product of *x* with *theta*.

    For a single sample vector ``x`` and a parameter vector ``theta`` this
    is h(x) = theta . x, collapsed to a scalar with a final sum.
    """
    return np.sum(np.dot(x, theta))
def compute_cost(X, Y, theta):
    """Mean-squared-error cost J(theta) = (1 / 2m) * sum((y - h(x))^2).

    Parameters
    ----------
    X : (m, n) design matrix.
    Y : (m,) or (m, 1) target values.
    theta : (n,) or (n, 1) parameter vector.

    Returns
    -------
    float
        The cost J for the given parameters.
    """
    m = Y.size
    predictions = X.dot(theta).flatten()
    # Flatten Y so a column vector (m, 1) does not broadcast against the
    # (m,) predictions into an (m, m) matrix of pairwise differences,
    # which silently produced a wrong cost.
    errors = (Y.flatten() - predictions) ** 2
    J = (1.0 / 2 / m) * errors.sum()
    return J
def descent(X, Y, theta, alpha=0.001, iter_count=1000):
    """Batch gradient descent for linear regression.

    Parameters
    ----------
    X : (m, n) design matrix.
    Y : (m,) or (m, 1) target values.
    theta : length-n initial parameter vector.
    alpha : learning rate.
    iter_count : number of full-batch update iterations.

    Returns
    -------
    list
        The fitted parameters, one entry per feature of X.
    """
    m = Y.size
    # Flatten Y: a column vector (m, 1) would otherwise broadcast against
    # the (m,) predictions into an (m, m) matrix and corrupt the gradient.
    y = Y.flatten()
    for _ in range(iter_count):
        predictions = X.dot(theta).flatten()
        # Fresh list each iteration -> a true simultaneous update; the
        # original reused one list, aliasing it with theta after the
        # first iteration.
        new_theta = [0] * len(theta)
        for j in range(len(theta)):
            errors = (predictions - y) * X[:, j]
            new_theta[j] = theta[j] - alpha * (1.0 / m) * errors.sum()
        theta = new_theta
    return theta
"""
h(x) = t0x0 + t1x1 + t2x2 + t3x3
"""
x = np.random.rand(2,2)
x_cap = map(np.linalg.norm , x)
theta = np.array([[1],[1]])
y = np.random.rand(2, 1)
h = hypothesis
print(theta)
theta = descent(x, y, theta, iter_count=100000, alpha=0.25)
print(theta)
y_cap = map(lambda l: hypothesis(l, theta), x)
beta_hat = np.linalg.lstsq(x,y)
print(beta_hat)
plt.plot(x_cap, y, 'r-' ,label="data plot")
plt.plot(x_cap, beta_hat[0], 'g-', label="hypothesis methodof least squares plot")
plt.plot(x_cap, y_cap, 'b-', label="descent")
plt.xlabel("X - axis")
plt.ylabel("Y - axis")
plt.show()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment