Neural Network in Haskell with some memory usage weirdness
-- Compile with ghc -O2 -msse4.2 --make nn -rtsopts
-- Run with ./nn False +RTS -s
-- .....
--     812,842,896 bytes allocated in the heap
--     592,620,616 bytes copied during GC
--      63,771,584 bytes maximum residency (11 sample(s))
--       1,063,968 bytes maximum slop
--             180 MB total memory in use (0 MB lost due to fragmentation)
--
--                                  Tot time (elapsed)  Avg pause  Max pause
--   Gen  0      1511 colls,     0 par   0.48s    0.48s     0.0003s    0.0122s
--   Gen  1        11 colls,     0 par   0.49s    0.54s     0.0494s    0.1186s
--
--   INIT    time    0.00s  (  0.00s elapsed)
--   MUT     time    0.20s  (  0.21s elapsed)
--   GC      time    0.97s  (  1.03s elapsed)
--   EXIT    time    0.00s  (  0.00s elapsed)
--   Total   time    1.18s  (  1.23s elapsed)
--
--   %GC     time      82.7%  (83.3% elapsed)
--
--   Alloc rate    3,989,608,795 bytes per MUT second
--
--   Productivity  17.3% of total user, 16.5% of total elapsed
--
-- But if it's run with ./nn True +RTS -s
-- .....
--   1,555,489,496 bytes allocated in the heap
--     187,868,488 bytes copied during GC
--         231,840 bytes maximum residency (78 sample(s))
--          39,400 bytes maximum slop
--               2 MB total memory in use (0 MB lost due to fragmentation)
--
--                                  Tot time (elapsed)  Avg pause  Max pause
--   Gen  0      2900 colls,     0 par   0.28s    0.29s     0.0001s    0.0005s
--   Gen  1        78 colls,     0 par   0.02s    0.02s     0.0002s    0.0004s
--
--   INIT    time    0.00s  (  0.00s elapsed)
--   MUT     time    0.31s  (  0.68s elapsed)
--   GC      time    0.30s  (  0.30s elapsed)
--   EXIT    time    0.00s  (  0.00s elapsed)
--   Total   time    0.61s  (  0.98s elapsed)
--
--   %GC     time      48.7%  (30.9% elapsed)
--
--   Alloc rate    4,947,155,234 bytes per MUT second
--
--   Productivity  51.3% of total user, 32.1% of total elapsed
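--
-- A plausible explanation for the difference (my assumption, not verified with
-- heap profiling): `dump` in backprop forces the network with `seq` in the
-- False case, which only evaluates to weak head normal form, so the weight
-- lists inside the Neurons accumulate as ever-growing chains of thunks across
-- the 1000 training iterations. With True, `traceShow` fully evaluates the
-- network via `show` on every iteration, so the thunks never pile up. A sketch
-- of forcing the network deeply without tracing, using Control.DeepSeq
-- (untested here):
--
-- import Control.DeepSeq
--
-- instance NFData Neuron where
--     rnf (Neuron w) = rnf w
-- instance NFData NeuronLayer where
--     rnf (NeuronLayer ns') = rnf ns'
-- instance NFData Network where
--     rnf (Network ls) = rnf ls
--
-- -- and in backprop's `dump`:
-- -- dump val expr = if debug then traceShow val expr else val `deepseq` expr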
import Text.Printf
import System.Random
import Data.List
import System.Environment
import Debug.Trace
class Executable a where
    execute :: a -> [Float] -> [Float]
----------------------------------------------------------------------
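-- A Neuron is just its list of weights; by convention the head of the list is
-- the bias (see the (bias:w) pattern below and the 1:inp in tweakNeuron).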
data Neuron = Neuron {weights :: [Float]}
instance Executable Neuron where
    execute (Neuron n) i = [stdsigmoid $ foldl (+) bias $ zipWith (*) w i]
        where (bias:w) = n
instance Show Neuron where
    show (Neuron w) = show w
----------------------------------------------------------------------
data NeuronLayer = NeuronLayer {neurons :: [Neuron]}
instance Executable NeuronLayer where
    execute (NeuronLayer nl) i = map (\n -> head $ n `execute` i) nl
instance Show NeuronLayer where
    show (NeuronLayer l) = "NeuronLayer\n" ++ foldl (\acc a -> acc ++ ('\t':show a) ++ "\n") "" l
----------------------------------------------------------------------
data Network = Network {neuronLayer :: [NeuronLayer]}
instance Executable Network where
    execute (Network ns) input = foldl (flip execute) input ns
instance Show Network where
    show (Network l) = "Network\n" ++ foldl (\acc a -> acc ++ foldl (\acc' a' -> acc' ++ if a' == '\t' then "\t\t" else [a']) "\t" (show a) ++ "\n") "" l
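-- Builds a Network from a list of layer sizes and an infinite stream of random
-- weights, returning the unconsumed remainder of the stream alongside the net.
-- e.g. fromList [1, 5, 1] rs makes a 1-input, 5-hidden, 1-output network and
-- consumes 5*(1+1) + 1*(5+1) = 16 weights (one weight per input plus a bias
-- per neuron).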
fromList :: [Int] -> [Float] -> ([Float], Network)
fromList a rds = (fst flr, Network $ snd flr)
    where
        flr = foldl il (rds, []) (zip a $ tail a)
        il (inr, inl) (inp, outp) = (fst ilr, inl ++ [snd ilr])
            where
                ilr = initLayer inp outp inr
        initLayer ins outs rds' = (fst ilr2, NeuronLayer $ snd ilr2)
            where
                ilr2 = initNeurons (outs-1) rds' []
                initNeurons i rs' acc'
                    | i == 0 = (fst res, acc' ++ [snd res])
                    | otherwise = initNeurons (i-1) (fst res) (acc' ++ [snd res])
                    where
                        res = initNeuron (ins+1) rs'
                initNeuron a3 rs = (drop a3 rs, Neuron (take a3 rs))
----------------------------------------------------------------------
stdsigmoid :: (Floating a) => a -> a
stdsigmoid val = 1 / (1 + exp (-val))
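-- The logistic sigmoid; its derivative at x is stdsigmoid x * (1 - stdsigmoid x),
-- which the weight update in backprop relies on.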
----------------------------------------------------------------------
data TrainingSet = TrainingSet { inputs :: [Float], outputs :: [Float]} deriving Show
----------------------------------------------------------------------
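-- Standard backpropagation by gradient descent, `steps` passes over the whole
-- set. Per training example:
--   1. forward pass: `result` collects every layer's activations (reversed back
--      into forward order),
--   2. backward pass: `layerError` propagates each neuron's error to the layer
--      before it through the weights (skipping the bias),
--   3. update: every weight moves by learningRate * error * derivative * input.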
backprop :: Network -> [TrainingSet] -> Int -> Bool -> Network
backprop net set steps debug = foldl' (\net' _ -> dump net' $ foldl' trainset net' set) net $ replicate steps (0::Int)
    where
        dump val expr = if debug then traceShow val expr else val `seq` expr
        learningRate = 0.25
        trainset net' set' = tweakNet net' layerError result
            where
                result = reverse $ foldl' (\acc a -> execute a (head acc):acc) [inputs set'] (neuronLayer net')
                layerError = foldl' (\oerr nl ->
                        foldl' (\acc' (e, n) ->
                                zipWith (\a b -> a + b*e)
                                    acc'
                                    (tail $ weights n))
                            (replicate (length (weights $ head (neurons nl)) - 1) (0::Float))
                            (zip (head oerr) (neurons nl))
                        : oerr)
                    [zipWith (-) (outputs set') (last result)]
                    (reverse $ tail $ neuronLayer net')
        tweakNet net' layerError result = Network $ zipWith3 tweakLayer (neuronLayer net') layerError (zip result $ tail result)
        tweakLayer l le (res, res1) = NeuronLayer $ zipWith3 (tweakNeuron res) res1 le (neurons l)
        tweakNeuron inp myOutput myError n = Neuron $ zipWith (\a b -> a + delta * b) (weights n) (1:inp)
            where
                -- v * (1 - v) is the derivative of the sigmoid at the point where its value is v
                derivate = myOutput * (1 - myOutput)
                delta = myError * derivate * learningRate
----------------------------------------------------------------------
normalize :: (Real a) => (a, a) -> a -> Float
normalize (mi, ma) a = realToFrac (a - mi) / realToFrac (ma - mi)
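-- Linearly maps a value in [mi, ma] onto [0, 1]; e.g. normalize (-1, 1) 0 == 0.5.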
----------------------------------------------------------------------
score :: Network -> [TrainingSet] -> ([[Float]] -> Float) -> Float
score net set scorefunc = scorefunc $ map (execute net . inputs) set
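-- Runs the network over every input in the set and hands the outputs to a
-- caller-supplied scoring function; main below sums the absolute errors.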
----------------------------------------------------------------------
main :: IO()
main =
    do
        (debug:_) <- getArgs
        let n' = backprop n training 1000 (read debug::Bool)
        putStr $ foldl (\acc a -> let res = (execute n' $ inputs a) in acc ++ '\n':show (inputs a) ++ show (outputs a) ++ show res ++ show (zipWith (-) (outputs a) res)) "" training
        printf "\n%0.3f\n" (score n' training (\a -> sum (zipWith (\x y -> abs (head x - head y)) a $ map outputs training)))
    where
        (training, il) = (map (\a -> TrainingSet [a/100] [normalize (-1, 1) (sin ((a/50)*pi))]) [0..100], [1, 5, 1])
        --(training, il) = ([
        --    TrainingSet [0,0] [0],
        --    TrainingSet [0,1] [1],
        --    TrainingSet [1,0] [1],
        --    TrainingSet [1,1] [0]], [2,5,1])
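        -- The active training set teaches a 1-5-1 network one period of sin:
        -- the input is scaled to [0, 1] and the output normalized from [-1, 1]
        -- to [0, 1]. The commented-out alternative is XOR on a 2-5-1 network.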
        n = snd (fromList il rs)
        gen = mkStdGen 1337
        rs = randomRs (-2, 2) gen::[Float]