A simple, naive implementation of gradient descent / linear regression in Swift, fitting capacity-bar loss vs. mileage data reported for the Nissan Leaf.
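In outline, the training loop below performs the classic least-mean-squares (stochastic gradient descent) update for a one-variable linear model h(x) = intercept + slope * x: for each training pair (x_i, y_i) it computes the error e = y_i - h(x_i), then nudges the parameters with intercept += alpha * e and slope += alpha * e * x_i, where alpha is the learning rate. Both the mileage and the bar-loss values are min-max scaled before fitting, and the final prediction is converted back to real units at the end.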
import UIKit
var milesDriven:[Double] = [0,100,200,300,400,500,600,700,800,900,1000,1100,1200,1300,1400,1500,1600,1700,1800,12000,16000,20000,13633,25000,11383,10200,15700,12734,14278,24000,25500,10052,21085,21451,21000,20300,20100,16000,17500,15000,20490,34173,31510,22900,23800,19000,10500,14000,19200,16500,22700,18300,19345,20791,16500,15868,13884,11275,10300,14180,23652,14900,16800,13365,32500,20651,27177,31473,15472,27435,26797,50600,41972,25725,15900,23000,44733,33700,35989,21649,34500,73205,53171,41000,30000,44263,54661,44076,39700,44897,34900,26495,28190,43520,28700,53600,55475,58078,40755,39801,34000,86400,52600,129300,61150,59983,48168,50952,55800,56000]
var barsLost:[Double] = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
var minMileage:Double = milesDriven.min()!
var maxMileage:Double = milesDriven.max()!
var milesDrivenScaled = milesDriven.map { ($0 - minMileage) / (maxMileage - minMileage) } //min-max normalize the miles
var minBars:Double = barsLost.min()!
var maxBars:Double = barsLost.max()!
var barsLostScaled = barsLost.map { ($0 - minBars) / (maxBars - minBars) } //min-max normalize the bar loss
//Play with these values
var iterations:Int = 500
var learningRate:Double = 0.01
var predictionMiles:Double = 23000
var scaledPredictionMiles:Double = (predictionMiles - minMileage) / (maxMileage - minMileage)
var JHistory = [Double?](repeating: nil, count: iterations) //To get the charted output in the playground
//Hypothesis: a straight line, predicted bars lost = intercept + slope * miles
func predictedBarsLost(intercept:Double, slope:Double, miles:Double) -> Double {
    return intercept + slope * miles
}
func findBestFit(x:[Double], y:[Double], iterations:Int, alpha:Double) -> (Double, Double) {
    var intercept:Double = 0.0
    var slope:Double = 0.0
    var J:Double = 0.0
    for n in 1...iterations {
        for i in 0..<x.count {
            //Error on this sample, then the least-mean-squares update of both parameters
            J = y[i] - predictedBarsLost(intercept: intercept, slope: slope, miles: x[i])
            intercept += alpha * J
            slope += alpha * J * x[i]
        }
        JHistory[n-1] = J //error on the last sample of each pass - watch this array to make sure it shrinks toward zero
    }
    return (intercept, slope)
}
var (best_Intercept, best_Slope) = findBestFit(x: milesDrivenScaled, y: barsLostScaled, iterations: iterations, alpha: learningRate)
var predictedBarLoss = predictedBarsLost(intercept: best_Intercept, slope: best_Slope, miles: scaledPredictionMiles)
//convert the scaled values back to real miles and bars
print("At \(scaledPredictionMiles * (maxMileage - minMileage) + minMileage) miles we predict a loss of \(predictedBarLoss * (maxBars - minBars) + minBars) bars.")