@righthandabacus
Created April 10, 2021 17:56
Example of using scipy.optimize for least-squares linear regression: the intercept and slope are estimated by minimizing the mean squared error with BFGS, supplying the analytic gradient through jac=True.
import numpy as np
import scipy.optimize as opt

# Generate synthetic data for linear regression: y = a + b*x + Gaussian noise
a = 3
b = 2
n = 1000
np.random.seed(42)
X = np.random.normal(loc=0, scale=4, size=(n, 1))
y = a + b*X + np.random.randn(n, 1)

# Least-squares regression by numerical optimization
def regression(X, y):
    n, d = X.shape
    # prepend a column of ones so params[0] is the intercept
    X = np.hstack([np.ones((n, 1)), X])

    # loss function: mean squared error, returned together with its
    # analytic gradient -(2/n) * X^T (y - X params)
    def loss(params):
        residual = y - (X @ params.reshape(-1, 1))
        mse = np.square(residual).mean()
        gradient = (-2 * residual * X).mean(axis=0)
        return mse, gradient

    # random initial params; jac=True tells minimize() that the objective
    # returns both the loss value and its gradient
    params0 = np.random.randn(d+1)
    return opt.minimize(loss, params0, jac=True, method="BFGS")

# Run
reg = regression(X, y)
print(reg)
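
As a quick sanity check (not part of the original gist), the BFGS estimate can be compared against the closed-form least-squares solution from np.linalg.lstsq; both should recover the true intercept a=3 and slope b=2 up to the noise level. A minimal sketch, assuming reg, X, and y as defined above:

# Sanity check: compare the BFGS estimate with the closed-form solution
Xb = np.hstack([np.ones((len(X), 1)), X])        # same design matrix with intercept column
params_exact, *_ = np.linalg.lstsq(Xb, y, rcond=None)

print("BFGS estimate:    ", reg.x)               # approximately [3, 2]
print("Closed-form lstsq:", params_exact.ravel())  # should agree to several decimals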