from numpy import log, dot, e
from numpy.random import rand


class LogisticRegression:

    def sigmoid(self, z):
        # Logistic function: maps any real value into the (0, 1) interval
        return 1 / (1 + e ** (-z))

    def cost_function(self, X, y, weights):
        # Binary cross-entropy (negative log-likelihood), averaged over samples
        z = dot(X, weights)
        predict_1 = y * log(self.sigmoid(z))
        predict_0 = (1 - y) * log(1 - self.sigmoid(z))
        return -sum(predict_1 + predict_0) / len(X)

    def fit(self, X, y, epochs=25, lr=0.05):
        loss = []
        weights = rand(X.shape[1])
        N = len(X)
        for _ in range(epochs):
            # Gradient descent step on the cross-entropy loss
            y_hat = self.sigmoid(dot(X, weights))
            weights -= lr * dot(X.T, y_hat - y) / N
            # Save the loss at each epoch to track convergence
            loss.append(self.cost_function(X, y, weights))
        self.weights = weights
        self.loss = loss

    def predict(self, X):
        # Probability of the positive class via the sigmoid
        z = dot(X, self.weights)
        # Threshold the probabilities at 0.5 to return binary labels
        return [1 if i > 0.5 else 0 for i in self.sigmoid(z)]
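

# Usage sketch (not part of the original gist): a minimal example of fitting
# the class above on synthetic data, assuming X is a NumPy feature matrix and
# y a 0/1 label vector. The data, seed, and variable names (rng, model, preds)
# are illustrative only.
if __name__ == "__main__":
    import numpy as np

    rng = np.random.default_rng(0)
    X = rng.normal(size=(200, 2))
    y = (X[:, 0] + X[:, 1] > 0).astype(float)  # linearly separable labels

    model = LogisticRegression()
    model.fit(X, y, epochs=100, lr=0.1)
    preds = model.predict(X)
    print("train accuracy:", np.mean(np.array(preds) == y))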