Skip to content

Instantly share code, notes, and snippets.

# Build the Boston-housing DataFrame from the loaded sklearn bunch.
# FIX: the original assigned boston['MEDV'] BEFORE creating `boston` with
# pd.DataFrame(...), which raises NameError. Create the frame first, then
# attach the target column.
boston = pd.DataFrame(boston_dataset.data, columns=boston_dataset.feature_names)
boston['MEDV'] = boston_dataset.target  # regression target from the dataset bunch
boston.head()
# Notebook setup: numeric / plotting / dataframe libraries.
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
# IPython/Jupyter magic — renders plots inline in the notebook.
# NOTE(review): not valid in a plain .py script; this file is a scraped notebook.
%matplotlib inline
# NOTE(review): load_boston was deprecated in scikit-learn 1.0 and removed in
# 1.2 — newer environments need an alternative data source. TODO confirm.
from sklearn.datasets import load_boston
boston_dataset = load_boston()
# Imports for the polynomial-regression example.
import operator
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, r2_score
from sklearn.preprocessing import PolynomialFeatures
# Fix the RNG seed so the synthetic data below is reproducible.
np.random.seed(0)
# Generate synthetic data for fitting: a cubic polynomial in x plus noise.
# NOTE: the exact values of x and y depend on the order of np.random calls
# after seed(0) — do not reorder these statements.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
np.random.seed(0)
# 20 samples of x drawn via a standard-normal sample, shifted and scaled.
x = 2 - 3 * np.random.normal(0, 1, 20)
# y = x - 2x^2 + 0.5x^3 with additive Gaussian noise (mean -3, std 3).
y = x - 2 * (x ** 2) + 0.5 * (x ** 3) + np.random.normal(-3, 3, 20)
# transforming the data to include another axis
# Visualize the synthetic cubic data as a scatter plot.
# NOTE: x and y values depend on the order of np.random calls after seed(0) —
# do not reorder these statements.
import numpy as np
import matplotlib.pyplot as plt
np.random.seed(0)
# Same synthetic data as above: cubic polynomial in x plus Gaussian noise.
x = 2 - 3 * np.random.normal(0, 1, 20)
y = x - 2 * (x ** 2) + 0.5 * (x ** 3) + np.random.normal(-3, 3, 20)
# s=10 sets the marker size in points^2.
plt.scatter(x,y, s=10)
plt.show()
# Regression evaluation metrics.
# Assumes y_pred and y_actual (equal-length arrays) and m (the number of
# training examples) are defined earlier in the notebook — TODO confirm.

# sum of squared residuals (the unexplained variation)
ssr = np.sum((y_pred - y_actual)**2)
# total sum of squares (variation of y_actual around its mean)
sst = np.sum((y_actual - np.mean(y_actual))**2)
# R2 score: fraction of variance explained by the model.
# NOTE(review): this variable shadows sklearn.metrics.r2_score imported
# earlier in the notebook; the name is kept for compatibility with any
# later cells that reference it.
r2_score = 1 - (ssr/sst)
# mean squared error
# FIX: the original stored the raw sum of squared errors here (that is the
# SSE, not the MSE) — divide by m to get the mean.
mse = np.sum((y_pred - y_actual)**2) / m
# root mean squared error (same value as the original sqrt(SSE/m))
rmse = np.sqrt(mse)
# imports
import numpy as np
class LinearRegressionUsingGD:
"""Linear Regression Using Gradient Descent.
Parameters
----------
eta : float