@chelseatroy
Created April 9, 2017 15:45
Linear Regressor
import random

import numpy as np


class LinearRegressor(object):
    def gradient_descent(self, x, y, alpha, max_iterations):
        # one weight per feature, plus one for the bias term
        num_weights = x.shape[1] + 1
        w = np.array([random.random() for _ in range(num_weights)]).reshape(num_weights, 1)
        best_weights = []
        lowest_cost = None
        for k in range(max_iterations):
            gradient = self.compute_gradient(x, y, w)
            cost = self.compute_costval(x, y, w)
            # remember the weights that produced the lowest cost seen so far
            if lowest_cost is None or cost < lowest_cost:
                best_weights = w
                lowest_cost = cost
            # take one gradient step
            w = w - alpha * gradient
        return best_weights
    def compute_gradient(self, x, y, w):
        P = len(y)
        grad = 0
        for p in range(P):
            # prepend a 1 so the bias weight participates in the dot product
            x_p = list(x[p])
            x_p.insert(0, 1)
            x_p = np.array(x_p).reshape(len(x_p), 1)
            y_p = y[p]
            # gradient of (x_p^T w - y_p)^2 with respect to w
            grad += 2 * (np.dot(x_p * x_p.T, w) - x_p * y_p)
        return grad
    def compute_costval(self, x, y, w):
        P = len(y)
        cost = 0
        # run over all data points and weights and compute total error
        for p in range(P):
            x_p = x[p]
            y_p = y[p]
            cost += (w[0] + np.dot(x_p, w[1:]) - y_p) ** 2
        return cost
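
To show how the class is meant to be called, here is a minimal usage sketch appended after the class definition; it is not part of the original gist, and the synthetic data, learning rate, and iteration count are assumptions chosen purely for illustration.

if __name__ == "__main__":
    # hypothetical demo data: a noisy line y ~ 3x + 2
    x = np.random.rand(100, 1)
    y = 3 * x + 2 + 0.1 * np.random.randn(100, 1)

    regressor = LinearRegressor()
    # alpha and max_iterations are illustrative; alpha must be small enough
    # that the summed (not averaged) gradient does not cause divergence
    weights = regressor.gradient_descent(x, y, alpha=0.001, max_iterations=2000)
    print(weights)  # roughly [[2.], [3.]]: bias first, then the slope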