Created July 23, 2020 16:10
Gradient descent for linear regression: finding the formula of a linear function.
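For context: the script below fits the parameters T = (θ₀, θ₁) of the model y = θ₀ + θ₁x by batch gradient descent. Each pass applies the standard update rule, where α is the step size and m the number of samples:

\theta_j \leftarrow \theta_j - \frac{\alpha}{m} \sum_{i=1}^{m} \left( \theta^\top x^{(i)} - y^{(i)} \right) x_j^{(i)}

Here α = 0.05 and m = 4, and the loop stops once an update no longer changes the parameters.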
# todo: find function formulas using gradient descent
# todo: check if you have
# t1 answers
# h1 = 1 2
# h2 = 10 20
import numpy as np

# Each column is one training sample: the first row is the bias feature
# (all ones), the second row is the input x. The targets in Y satisfy
# y = 2x + 1 exactly, so the true parameters are [1, 2].
features = np.array([
    [1, 1, 1, 1],
    [-1, 2, 3, 10]
])
Y = np.array([-1, 5, 7, 21])

# Initial guess for the parameters [intercept, slope].
T = np.array([1.0, 10.0])


def costDerivative(T, features, Y):
    # Residuals: predictions T.dot(features) minus targets Y.
    return T.dot(features) - Y


def costDerivativesPerFeature(T, features, Y):
    # One gradient component per parameter: the residuals dotted with the
    # corresponding feature row (the 1/m factor is applied in the update step).
    residuals = costDerivative(T, features, Y)
    return [residuals.dot(feature) for feature in features]


def gradientDescent(nextT, features, Y):
    samplesLength = features.shape[1]
    step = 0.05
    prevT = np.zeros_like(nextT)
    count = 0
    # Iterate until an update leaves the parameters unchanged,
    # i.e. a fixed point in floating-point arithmetic.
    while np.any(prevT != nextT):
        count += 1
        prevT = nextT
        nextT = np.array([
            t - step / samplesLength * derivative
            for t, derivative in zip(nextT, costDerivativesPerFeature(nextT, features, Y))
        ])
    return nextT, count


print(gradientDescent(T, features, Y))
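As a quick sanity check (not part of the original gist), the result can be compared against NumPy's closed-form least-squares solver; a minimal sketch, reusing the same features and Y as above:

import numpy as np

features = np.array([
    [1, 1, 1, 1],
    [-1, 2, 3, 10]
])
Y = np.array([-1, 5, 7, 21])

# Solve features.T @ theta ≈ Y in the least-squares sense.
theta, *_ = np.linalg.lstsq(features.T, Y, rcond=None)
print(theta)  # expected: approximately [1., 2.], i.e. y = 2x + 1

Since the targets fit y = 2x + 1 exactly, gradientDescent should settle at roughly the same parameters; the iteration count it returns depends on floating-point rounding.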