@ear
Created November 1, 2017 21:54
module Main where

-- Requires the matrix and random packages.
import Data.List (unfoldr, mapAccumL, foldl')
import Data.Matrix
import Data.Foldable (sum)
import System.Random

-- Layer sizes: 28*28 inputs, two hidden layers of 16 units, 10 outputs.
layers :: [Int]
layers = [28*28, 16, 16, 10]
-- | Dimensions of each weight matrix as (rows, cols) pairs, taken from
-- adjacent layer sizes.
pairs :: [(Int,Int)]
pairs = unfoldr collect2 layers
  where
    collect2 xs@(x:(~ys@(y:_))) | length xs < 2 = Nothing
                                | otherwise     = Just ((y,x), ys)
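-- Worked example, added for clarity: with the layer sizes above, pairs
-- evaluates to [(16,784), (16,16), (10,16)].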
type Bias = Float
type Weight = Float
type Layer = (Matrix Weight, Matrix Bias) -- (k x n, k x 1)
type NeuralNet = [Layer]
-- | Build a network with weights and biases drawn uniformly from (-1, 1).
randomLayers :: RandomGen g => [(Int,Int)] -> g -> NeuralNet
randomLayers dims g = snd $ mapAccumL generate (randomRs (-1.0, 1.0) g) dims
  where
    -- Each layer consumes rows*cols weights and rows biases from the
    -- infinite stream of random values; the remainder is threaded along.
    generate rs (rows, cols) =
      let (weights, rs')  = splitAt (rows * cols) rs
          (biases,  rs'') = splitAt rows rs'
      in  (rs'', (fromList rows cols weights, fromList rows 1 biases))
sigmoid :: Float -> Float
sigmoid u = 1 / (1 + exp (-u))
-- | Feed an input column vector forward through every layer of the network.
activate :: Matrix Float -- n x 1
         -> NeuralNet    -- first layer has n inputs
         -> Matrix Float -- as big as the last layer
activate = foldl' propagate
  where
    propagate as (ws, bs) =
      colVector (sigmoid <$> getMatrixAsVector (ws * as + bs))
-- | ss - sum of squares of differences.
ss :: Matrix Float -- column vector
   -> Matrix Float -- column vector (same dimension)
   -> Float
ss v w = sum $ fmap (\x -> x*x) $ v - w
-- -- | cost
-- cost :: NeuralNet -> Training -> Float
-- cost = undefined
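-- A minimal sketch of the commented-out cost above, assuming a training set
-- is a list of (input, expected output) column-vector pairs; Training and
-- meanCost are illustrative names, not fixed by the code above.
type Training = [(Matrix Float, Matrix Float)]

meanCost :: NeuralNet -> Training -> Float
meanCost nn samples =
  sum [ ss (activate a0 nn) expected | (a0, expected) <- samples ]
    / fromIntegral (length samples)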
main :: IO ()
main = do
  g <- getStdGen
  let nn = randomLayers pairs g
  putStrLn "[+] network:"
  print nn
  -- Feed a constant 0.1 activation into every input unit, just to exercise
  -- the network.
  let a0 = fromList (head layers) 1 $ cycle [0.1]
  putStrLn "[+] activation results:"
  print $ activate a0 nn
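-- To try it out, assuming GHC with the matrix and random packages installed:
--   runghc Main.hs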