Created December 24, 2017 03:27
Save mathias-brandewinder/d48abe4a571c53a4a70c709c3121a566 to your computer and use it in GitHub Desktop.
Baby steps with CNTK and F#
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Code for the post http://brandewinder.com/2017/12/23/baby-steps-with-cntk-and-fsharp/

open System
open System.IO
open System.Collections.Generic

// Folders containing the native CNTK binaries; they must be on the process
// PATH for the managed wrapper to load its unmanaged dependencies.
let dependencies = [
    "./packages/CNTK.CPUOnly/lib/net45/x64/"
    "./packages/CNTK.CPUOnly/support/x64/Dependency/"
    "./packages/CNTK.CPUOnly/support/x64/Dependency/Release/"
    "./packages/CNTK.CPUOnly/support/x64/Release/"
    ]

// Append the script directory and every dependency folder to PATH in a
// single write (the original appended the script directory separately and
// re-read/re-wrote the environment variable on every loop iteration).
let extendedPath =
    dependencies
    |> List.map (fun dep -> Path.Combine(__SOURCE_DIRECTORY__, dep))
    |> List.append [ __SOURCE_DIRECTORY__ ]
    |> String.concat ";"

Environment.SetEnvironmentVariable("Path",
    Environment.GetEnvironmentVariable("Path") + ";" + extendedPath)
// Tell F# Interactive where to resolve assemblies and native libraries,
// then reference the CNTK managed wrapper.
#I "./packages/CNTK.CPUOnly/lib/net45/x64/"
#I "./packages/CNTK.CPUOnly/support/x64/Dependency/"
#I "./packages/CNTK.CPUOnly/support/x64/Dependency/Release/"
#I "./packages/CNTK.CPUOnly/support/x64/Release/"

#r "./packages/CNTK.CPUOnly/lib/net45/x64/Cntk.Core.Managed-2.3.1.dll"

open CNTK
// Problem shape: 2 input features, 1 output value, running on the CPU.
let inputDim = 2
let outputDim = 1

// Symbolic placeholders for the features and the expected labels.
let input = Variable.InputVariable(NDShape.CreateNDShape [ inputDim ], DataType.Double, "input")
let output = Variable.InputVariable(NDShape.CreateNDShape [ outputDim ], DataType.Double, "output")

let device = DeviceDescriptor.CPUDevice
// Linear predictor: weights . input + constant, with both parameters
// initialized to 0.0 and named so they can be retrieved later.
let predictor =
    let featureCount = input.Shape.[0]
    let weights = new Parameter(NDShape.CreateNDShape [ featureCount ], DataType.Double, 0.0, device, "weights")
    let constant = new Parameter(NDShape.CreateNDShape [ outputDim ], DataType.Double, 0.0, device, "constant")
    // intermediate Function: dot product of weights and input
    let product = CNTKLib.TransposeTimes(input, weights)
    CNTKLib.Plus(new Variable(product), constant)
// One observation, (3.0, 5.0), packaged as a batch of size 1.
let inputValue = Value.CreateBatch(NDShape.CreateNDShape [ inputDim ], [| 3.0; 5.0 |], device)

// Bind the input variable to that value for evaluation.
let inputMap =
    let bindings = Dictionary<Variable,Value>()
    bindings.[input] <- inputValue
    bindings
let predictedOutput = predictor.Output

// Look up the learnable parameters by the names given at construction,
// so their values can be inspected alongside the prediction.
let weights =
    predictor.Parameters ()
    |> Seq.find (fun p -> p.Name = "weights")

let constant =
    predictor.Parameters ()
    |> Seq.find (fun p -> p.Name = "constant")

// Output bindings: a null value asks CNTK to allocate the result for us.
let outputMap =
    let bindings = Dictionary<Variable,Value>()
    bindings.[predictedOutput] <- null
    bindings.[weights] <- null
    bindings.[constant] <- null
    bindings
// Run the network forward: fills outputMap with the prediction and the
// current parameter values.
predictor.Evaluate(inputMap, outputMap, device)

// Pulls the dense data computed for a variable out of outputMap, as one
// float [] per sequence in the batch. Factored out: the original repeated
// this exact pipeline three times.
let extractDense (v: Variable) =
    outputMap.[v].GetDenseData<float>(v)
    |> Seq.map Seq.toArray
    |> Seq.toArray

let currentPrediction = extractDense predictedOutput
// Parameters are upcast explicitly: F# does not implicitly convert
// Parameter -> Variable for let-bound function arguments.
let currentWeights = extractDense (weights :> Variable)
let currentConstant = extractDense (constant :> Variable)
// training : we need to supply labels, and loss function

// Training set: 2 examples of 2 features each, with their target values.
// NOTE: the original script first bound a 1-example batch under the same
// names and immediately shadowed it; those dead bindings are removed.
let batchInputValue = Value.CreateBatch(NDShape.CreateNDShape [ inputDim ], [| 3.0; 5.0; 1.0; 7.0 |], device)
let batchOutputValue = Value.CreateBatch(NDShape.CreateNDShape [ outputDim ], [| 10.0; 7.0 |], device)

let batch =
    [
        input, batchInputValue
        output, batchOutputValue
    ]
    |> dict

// Squared error serves both as the training loss and the reported metric.
let loss = CNTKLib.SquaredError(new Variable(predictor), output, "loss")
let evaluation = CNTKLib.SquaredError(new Variable(predictor), output, "evaluation")

// Plain SGD, learning rate 0.01 per sample.
let learningRatePerSample = new TrainingParameterScheduleDouble(0.01, uint32 1)

let learners =
    ResizeArray<Learner>(
        [
            Learner.SGDLearner(predictor.Parameters(), learningRatePerSample)
        ]
    )

let trainer = Trainer.CreateTrainer(predictor, loss, evaluation, learners)

// 11 gradient steps on the same mini-batch, logging loss and metric.
for _ in 0 .. 10 do
    trainer.TrainMinibatch(batch, true, device) |> ignore
    trainer.PreviousMinibatchLossAverage () |> printfn "Loss: %f"
    trainer.PreviousMinibatchEvaluationAverage () |> printfn "Eval: %f"
// train on a batch
module BatchExample =

    let inputDim = 2
    let outputDim = 1

    let input = Variable.InputVariable(NDShape.CreateNDShape [ inputDim ], DataType.Double, "input")
    let output = Variable.InputVariable(NDShape.CreateNDShape [ outputDim ], DataType.Double, "output")

    let device = DeviceDescriptor.CPUDevice

    // Linear predictor: weights . input + constant, parameters at 0.0.
    let predictor =
        let dim = input.Shape.[0]
        let weights = new Parameter(NDShape.CreateNDShape [ dim ], DataType.Double, 0.0, device, "weights")
        let product = CNTKLib.TransposeTimes(input, weights)
        let constant = new Parameter(NDShape.CreateNDShape [ outputDim ], DataType.Double, 0.0, device, "constant")
        CNTKLib.Plus(new Variable(product), constant)

    // Ground truth the trainer should recover: y = 3 x0 - 2 x1 + 5.
    let realModel (features: float []) =
        3.0 * features.[0] - 2.0 * features.[1] + 5.0

    let rng = Random(123456)

    // Generates one mini-batch of batchSize random examples.
    // BUG FIX: the original bound batchSize = 32 but never used it — each
    // call produced a single example. We now flatten batchSize examples
    // into the input/output Values (CreateBatch splits the flat array into
    // samples of the given shape).
    let batch () =
        let batchSize = 32
        let features =
            Array.init batchSize (fun _ -> [| rng.NextDouble(); rng.NextDouble() |])
        let labels = features |> Array.map realModel
        let inputValues = Value.CreateBatch(NDShape.CreateNDShape [ inputDim ], Array.concat features, device)
        let outputValues = Value.CreateBatch(NDShape.CreateNDShape [ outputDim ], labels, device)
        [
            input, inputValues
            output, outputValues
        ]
        |> dict

    let loss = CNTKLib.SquaredError(new Variable(predictor), output, "loss")
    let evaluation = CNTKLib.SquaredError(new Variable(predictor), output, "evaluation")

    let learningRatePerSample = new TrainingParameterScheduleDouble(0.05, uint32 1)

    let learners =
        ResizeArray<Learner>(
            [
                Learner.SGDLearner(predictor.Parameters(), learningRatePerSample)
            ]
        )

    let trainer = Trainer.CreateTrainer(predictor, loss, evaluation, learners)

    #time "on"

    // 1000 SGD steps, each on a freshly sampled mini-batch.
    for _ in 1 .. 1000 do
        let example = batch ()
        trainer.TrainMinibatch(example, true, device) |> ignore
        trainer.PreviousMinibatchLossAverage () |> printfn "Loss: %f"

    // Read back the learned parameters after training. No input bindings
    // are needed: only parameter values are requested.
    let inputMap = Dictionary<Variable,Value>()

    let weights =
        predictor.Parameters ()
        |> Seq.find (fun p -> p.Name = "weights")
    let constant =
        predictor.Parameters ()
        |> Seq.find (fun p -> p.Name = "constant")

    let outputMap =
        let map = Dictionary<Variable,Value>()
        map.Add(weights, null)
        map.Add(constant, null)
        map

    predictor.Evaluate(inputMap, outputMap, device)

    let currentWeights =
        outputMap.[weights].GetDenseData<float>(weights)
        |> Seq.map Seq.toArray
        |> Seq.toArray
    let currentConstant =
        outputMap.[constant].GetDenseData<float>(constant)
        |> Seq.map Seq.toArray
        |> Seq.toArray
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.