Skip to content

Instantly share code, notes, and snippets.

@itsAnanth
Created September 24, 2024 22:02
Show Gist options
  • Save itsAnanth/089fab7cb0ea5d975423977c4bdd34fc to your computer and use it in GitHub Desktop.
import numpy as np
class LogisticRegression:
    """Binary logistic regression classifier trained with batch gradient descent.

    Minimizes binary cross-entropy loss over the full training set each epoch.
    """

    def __init__(self, learning_rate=0.01, epochs=1000):
        """Configure the optimizer.

        Args:
            learning_rate: Step size for each gradient-descent update.
            epochs: Number of full passes over the training data.
        """
        self.learning_rate = learning_rate
        self.epochs = epochs
        self.weights = None  # set by fit(); shape (num_features,)
        self.bias = None     # set by fit(); scalar intercept

    def sigmoid(self, z):
        """Map logits to probabilities in (0, 1).

        Clips z so np.exp cannot overflow for large-magnitude inputs;
        the result is unchanged to within float64 precision.
        """
        z = np.clip(z, -500, 500)
        return 1.0 / (1.0 + np.exp(-z))

    def fit(self, X, y):
        """Fit weights and bias to training data.

        Args:
            X: Feature matrix of shape (num_samples, num_features).
            y: Binary labels (0/1) of shape (num_samples,).
        """
        num_samples, num_features = X.shape

        # Initialize parameters to zero.
        self.weights = np.zeros(num_features)
        self.bias = 0

        # Batch gradient descent on binary cross-entropy loss.
        for _ in range(self.epochs):
            linear_model = np.dot(X, self.weights) + self.bias
            # Squash logits to probabilities between 0 and 1.
            y_predicted = self.sigmoid(linear_model)

            # Gradients of binary cross-entropy w.r.t. weights and bias.
            dw = (1 / num_samples) * np.dot(X.T, (y_predicted - y))
            db = (1 / num_samples) * np.sum(y_predicted - y)

            # Parameter update step.
            self.weights -= self.learning_rate * dw
            self.bias -= self.learning_rate * db

    def predict(self, X):
        """Return hard 0/1 class predictions for each row of X.

        Probabilities strictly above 0.5 map to class 1 (same threshold
        semantics as the original list-comprehension version).
        """
        linear_model = np.dot(X, self.weights) + self.bias
        y_predicted = self.sigmoid(linear_model)
        # Vectorized thresholding instead of a Python-level loop.
        return (y_predicted > 0.5).astype(int)
if __name__ == "__main__":
    # Truth table for a two-input AND gate.
    features = np.array([[0, 0], [1, 0], [0, 1], [1, 1]])
    labels = np.array([0, 0, 0, 1])  # AND gate

    classifier = LogisticRegression(learning_rate=0.1, epochs=1000)
    classifier.fit(features, labels)
    print("Predictions:", classifier.predict(features))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment