Skip to content

Instantly share code, notes, and snippets.

@codeboy101
Last active January 14, 2017 12:13
Show Gist options
  • Select an option

  • Save codeboy101/3b15bc062cd31be67f3164eeec8bc95d to your computer and use it in GitHub Desktop.

Select an option

Save codeboy101/3b15bc062cd31be67f3164eeec8bc95d to your computer and use it in GitHub Desktop.
import numpy as np
import pandas as pd
# Load the dataset (columns: size, age, price), z-score normalise every
# column, and prepend a bias column of ones for the intercept term.
data = pd.read_csv('m_dataset.txt', header=None, names=['size', 'age', 'price'])
data = (data - data.mean()) / data.std()
data.insert(0, 'Ones', 1)

n_cols = data.shape[1]
X = np.matrix(data.iloc[:, :n_cols - 1].values)  # features, incl. bias column
y = np.matrix(data.iloc[:, n_cols - 1:].values)  # target column (price)
theta = np.matrix([[0, 0, 0]])                   # initial parameter row vector
def compute_cost(X, y, theta):
    """Return the least-squares cost J(theta) for linear regression.

    Parameters
    ----------
    X : np.matrix, shape (m, n)
        Design matrix (first column expected to be ones for the bias).
    y : np.matrix, shape (m, 1)
        Target values.
    theta : np.matrix, shape (1, n)
        Parameter row vector.

    Returns
    -------
    float
        J(theta) = sum((X @ theta.T - y) ** 2) / (2 * m).
    """
    squared_error = np.power(np.matmul(X, theta.T) - y, 2)
    # BUG FIX: the original wrote `np.sum(...) / 2 * len(X)`, which by
    # operator precedence MULTIPLIES by m instead of dividing by 2m.
    return np.sum(squared_error) / (2 * len(X))
def gradient_descent(X, y, theta, alpha, iters):
    """Run batch gradient descent for linear regression.

    Parameters
    ----------
    X : np.matrix, shape (m, n)
        Design matrix (first column expected to be ones for the bias).
    y : np.matrix, shape (m, 1)
        Target values.
    theta : np.matrix, shape (1, n)
        Initial parameter row vector (not mutated).
    alpha : float
        Learning rate.
    iters : int
        Number of iterations.

    Returns
    -------
    (np.matrix, np.ndarray)
        The fitted parameters and the per-iteration cost history.
    """
    temp_theta = np.matrix(np.zeros(theta.shape))
    parameters = int(theta.ravel().shape[1])
    cost = np.zeros(iters)
    for i in range(iters):
        error = (X * theta.T) - y
        # Simultaneous update: each new theta[0, j] is computed from the
        # error of the *current* theta before any component changes.
        for j in range(parameters):
            column = np.multiply(error, X[:, j])
            temp_theta[0, j] = theta[0, j] - ((alpha / len(X)) * np.sum(column))
        # BUG FIX: the original never copied temp_theta back into theta,
        # so every iteration recomputed the same first step and the
        # parameters never moved (cost history stayed flat).
        theta = temp_theta.copy()
        cost[i] = compute_cost(X, y, theta)
    return theta, cost
# Train the model and report the fitted parameters.
learning_rate = 0.001   # step size for gradient descent
n_iterations = 1000     # number of full passes over the data
params, cost = gradient_descent(X, y, theta, learning_rate, n_iterations)
print(params)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment