Autograd Regressor
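A least-squares linear regressor trained by gradient descent; autograd differentiates the cost function, so no gradient has to be derived by hand.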
import matplotlib.pyplot as plt
import pandas as pd
import autograd.numpy as np  # thinly-wrapped numpy
from autograd import grad as compute_grad  # the only autograd function you may ever need
class learner():
    def __init__(self, **args):
        self.x = 0
        self.y = 0
        self.whist = []
        self.ghist = []
        self.w_best = 0

        # reset defaults if requested
        self.K = 100
        if 'max_its' in args:
            self.K = args['max_its']
        self.alpha = 10**-3
        if 'alpha' in args:
            self.alpha = args['alpha']
    # load data: all columns but the last are features, the last is the target
    def load_data(self, csvname):
        data = np.asarray(pd.read_csv(csvname))
        self.data = data
        self.x = data[:, :-1]
        self.y = data[:, -1]
        self.y.shape = (len(self.y), 1)  # make y a column vector
    # compute cost value (sum of squared errors over all data points)
    def compute_cost_val(self, w):
        P = len(self.y)
        cost = 0
        # run over all data points and weights and compute total error
        for p in range(P):
            # get pth point
            x_p = self.x[p]
            y_p = self.y[p]
            # linear combination: bias plus weighted features
            temp = w[0] + sum([v * e for v, e in zip(x_p, w[1:])])
            # add squared error to cost
            cost += (temp - y_p)**2
        return cost
    # gradient descent loop
    def grad_descent(self):
        # initial point
        w = np.random.randn(np.shape(self.x)[1] + 1, 1)

        # compute gradient of cost function for use in loop
        grad = compute_grad(self.compute_cost_val)

        # create containers to record weights and costs
        self.whist = []
        self.ghist = []

        # descent loop, tracking the best weights seen so far
        self.w_best = w
        g_best = np.inf
        for k in range(self.K):
            # record current weight and cost
            self.whist.append(w)
            g = self.compute_cost_val(w)
            self.ghist.append(g)
            if g < g_best:
                self.w_best = w
                g_best = g

            # take descent step
            w = w - self.alpha * grad(w)
    # a short function to perform predictions with the best weights found
    def predict(self, x_input):
        # w_best has shape (n+1, 1): bias first, then one weight per feature
        output = self.w_best[0] + np.dot(x_input, self.w_best[1:])
        return output[0]
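A minimal usage sketch, assuming a CSV whose last column is the target; the filename, step size, and input point below are stand-ins, not part of the original gist:

# hypothetical usage: filename and values below are stand-ins
model = learner(max_its=500, alpha=10**-2)
model.load_data('my_data.csv')                 # last column treated as the target
model.grad_descent()
print(model.predict(np.asarray([1.5, 2.0])))   # a two-feature input point

# plot the recorded cost history to check convergence
plt.plot([float(g) for g in model.ghist])      # each recorded cost is a shape-(1,) array
plt.xlabel('iteration')
plt.ylabel('cost')
plt.show()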