Logistic Regression with SymPy
from sympy import *
import pandas as pd

# Symbols for the slope, intercept, summation index, and number of points
m, b, i, n = symbols('m b i n')
x, y = symbols('x y', cls=Function)

# Joint log-likelihood of the logistic model across all data points
joint_likelihood = Sum(log((1.0 / (1.0 + exp(-(b + m * x(i)))))**y(i)
                           * (1.0 - (1.0 / (1.0 + exp(-(b + m * x(i))))))**(1 - y(i))),
                       (i, 0, n))

# Load the data points
points = list(pd.read_csv("https://tinyurl.com/y2cocoo7").itertuples())

# Partial derivatives with respect to m and b, with the data substituted in
d_m = lambdify([m, b], diff(joint_likelihood, m)
               .subs(n, len(points) - 1).doit()
               .replace(x, lambda i: points[i].x)
               .replace(y, lambda i: points[i].y))

d_b = lambdify([m, b], diff(joint_likelihood, b)
               .subs(n, len(points) - 1).doit()
               .replace(x, lambda i: points[i].x)
               .replace(y, lambda i: points[i].y))

# Perform gradient ascent to maximize the joint log-likelihood
m = 0.01
b = 0.01
L = .01

for j in range(10_000):
    m += d_m(m, b) * L
    b += d_b(m, b) * L

print(m, b)
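
Once the loop finishes, the fitted coefficients m and b can be plugged back into the same logistic function used in the likelihood to score new inputs. Below is a minimal sketch, not part of the original gist, assuming the fit above has already run; the input value 1.5 and the helper name predict_probability are made up for illustration.

import math

def predict_probability(x_value, m, b):
    # Logistic function, matching the model form used in joint_likelihood above
    return 1.0 / (1.0 + math.exp(-(b + m * x_value)))

# Hypothetical input value, purely for illustration
print(predict_probability(1.5, m, b))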