# Logistic-regression-from-scratch snippets (collected from a GitHub gist).
class TestGradientDescent(unittest.TestCase):
    """Round-trip check: weights learned by fit() must classify the training set."""

    def test_correct_prediction(self):
        global X
        global y
        # fit()/predict() expect a 2-D design matrix (n_samples, n_features);
        # promote a 1-D feature vector to a single-column matrix if needed.
        if X.ndim != 2:
            X = X.reshape(X.shape[0], 1)
        weights = fit(X, y)
        # Probabilities rounded to {0, 1} must match the labels exactly.
        predictions = predict(X, weights).round()
        self.assertTrue((predictions == y).all())
def fit(X, y, n_iter=100000, lr=0.01):
    """Train logistic-regression weights by batch gradient descent.

    Args:
        X: design matrix, shape (n_samples, n_features).
        y: binary target vector, shape (n_samples,).
        n_iter: number of gradient-descent steps.
        lr: learning rate.

    Returns:
        Weight vector of shape (n_features + 1,), bias weight first.
    """
    # predict() prepends an intercept column before the dot product, so the
    # weights must be trained against the same augmented matrix to match.
    X = add_intercept(X)
    W = np.zeros(X.shape[1])
    for _ in range(n_iter):
        z = np.dot(X, W)
        h = sigmoid(z)
        # Gradient of the mean cross-entropy loss w.r.t. W.
        gradient = np.dot(X.T, (h - y)) / y.size
        W -= lr * gradient
    # BUG FIX: the original had no return statement, so callers got None.
    return W
def fit(X, y, n_iter=100000, lr=0.001):
    """Train logistic-regression weights by batch gradient descent.

    NOTE(review): this is a duplicate definition of fit() (lower learning
    rate); in a single module the later definition shadows the earlier one.

    Args:
        X: design matrix, shape (n_samples, n_features).
        y: binary target vector, shape (n_samples,).
        n_iter: number of gradient-descent steps.
        lr: learning rate.

    Returns:
        Weight vector of shape (n_features + 1,), bias weight first.
    """
    # Train against the intercept-augmented matrix so the learned weights
    # line up with predict(), which also prepends a bias column.
    X = add_intercept(X)
    W = np.zeros(X.shape[1])
    for _ in range(n_iter):
        z = np.dot(X, W)
        h = sigmoid(z)
        # Gradient of the mean cross-entropy loss w.r.t. W.
        gradient = np.dot(X.T, (h - y)) / y.size
        W -= lr * gradient
    # BUG FIX: the original had no return statement, so callers got None.
    return W
def add_intercept(X):
    """Return X with a leading all-ones column (bias/intercept feature)."""
    ones = np.ones((X.shape[0], 1))
    return np.hstack((ones, X))
def predict(X, W):
    """Return sigmoid probabilities for the rows of X under weights W.

    An intercept column is prepended to X, so W must carry the bias
    weight as its first element.
    """
    augmented = add_intercept(X)
    scores = np.dot(augmented, W)
    return sigmoid(scores)
def fit(X, y, n_iter=100000, lr=0.01):
    """Train logistic-regression weights by batch gradient descent.

    NOTE(review): this def line had no body in the pasted source (a syntax
    error); restored to match the other fit() definitions in this file.

    Args:
        X: design matrix, shape (n_samples, n_features).
        y: binary target vector, shape (n_samples,).
        n_iter: number of gradient-descent steps.
        lr: learning rate.

    Returns:
        Weight vector of shape (n_features + 1,), bias weight first.
    """
    # Match predict(), which prepends a bias column before the dot product.
    X = add_intercept(X)
    W = np.zeros(X.shape[1])
    for _ in range(n_iter):
        z = np.dot(X, W)
        h = sigmoid(z)
        # Gradient of the mean cross-entropy loss w.r.t. W.
        gradient = np.dot(X.T, (h - y)) / y.size
        W -= lr * gradient
    return W
class TestLogisticRegressor(unittest.TestCase):
    """The classifier must reproduce the training labels on the toy data."""

    def test_correct_prediction(self):
        global X
        global y
        # The model expects a 2-D design matrix (n_samples, 1 feature).
        X = X.reshape(X.shape[0], 1)
        model = LogisticRegressor()
        predicted = model.fit(X, y).predict(X)
        self.assertTrue((predicted == y).all())
class LogisticRegressor:
    """Logistic-regression classifier (probability prediction shown here).

    NOTE(review): only _add_intercept and predict_probs are visible in this
    chunk; predict_probs reads self.W, which is presumably set by a fit
    method defined elsewhere — confirm against the full source.
    """

    def _add_intercept(self, X):
        """Return X with a leading all-ones bias column."""
        bias = np.ones((X.shape[0], 1))
        return np.concatenate((bias, X), axis=1)

    def predict_probs(self, X):
        """Return class-1 probabilities for the rows of X using self.W."""
        design = self._add_intercept(X)
        return sigmoid(np.dot(design, self.W))
# Score two unseen inputs with a model trained on the global X, y.
X_test = np.array([10, 250]).reshape(-1, 1)
y_test = LogisticRegressor().fit(X, y).predict(X_test)
# Load the house-prices training set (Kaggle-style CSV) into a DataFrame.
df_train = pd.read_csv('house_prices_train.csv')
# Summary statistics (count/mean/std/min/quartiles/max) of the target column.
df_train['SalePrice'].describe()
class TestLoss(unittest.TestCase):
    """Pin loss() at a few hand-checked (h, y) points."""

    def test_zero_h_zero_y(self):
        # A perfect prediction of the negative class costs nothing.
        self.assertAlmostEqual(loss(h=np.array([0]), y=np.array([0])), 0)

    def test_one_h_zero_y(self):
        self.assertAlmostEqual(loss(h=np.array([1]), y=np.array([0])), 0.5)

    def test_two_h_zero_y(self):
        self.assertAlmostEqual(loss(h=np.array([2]), y=np.array([0])), 2)