Skip to content

Instantly share code, notes, and snippets.

@Caaz
Created September 18, 2018 21:56
Show Gist options
  • Save Caaz/261ffc91e67446a22378b5a2eb077fc3 to your computer and use it in GitHub Desktop.
Save Caaz/261ffc91e67446a22378b5a2eb077fc3 to your computer and use it in GitHub Desktop.
Multi-Layer Perceptron
-- multi-layer perceptron built on the project's `_` class helper
-- (pico-8 lua dialect: note += / -= below and rnd() for randoms).
-- structure follows the classic "step by step backpropagation
-- example": one hidden layer, one output layer.
_mlp = _{
-- constructor. t is the instance under construction, a is the
-- caller's options table merged over the defaults below.
-- NOTE(review): presumably `merge` copies a's keys onto t, so t
-- ends up with t.inputs, t.hidden, t.outputs etc. — confirm
-- against the merge() helper, which is defined elsewhere.
new = function(t,a)
merge(t,{
learning_rate = 0.5,
},a)
-- t.inputs (input count) is expected to arrive via `a`
t.hidden_layer = _neuron_layer{count=t.hidden}
t.output_layer = _neuron_layer{count=t.outputs}
-- initialize neuron layers with weights (optional explicit
-- weight lists; missing entries fall back to random — see the
-- two iw* helpers below)
t:iwfithln(t.hidden_layer_weights)
t:iwfhlntoln(t.output_layer_weights)
end,
-- "init weights from inputs to hidden layer neurons".
-- hlw is an optional FLAT list of initial weights covering every
-- (hidden neuron, input) pair in row-major order — neuron 1's
-- t.inputs weights first, then neuron 2's, and so on. any missing
-- entry (or no list at all) falls back to a random weight.
iwfithln = function(t,hlw)
for i, n in pairs(t.hidden_layer.neurons) do
for j=1, t.inputs do
-- bug fix: index the flat list per neuron. the old code used
-- hlw[j], so every hidden neuron got an identical copy of the
-- first t.inputs weights instead of its own slice.
n.weights[j] = (hlw and hlw[(i-1)*t.inputs+j] or rnd())
end
end
end,
-- "init weights from hidden layer neurons to output layer neurons".
-- olw is an optional FLAT list of initial weights covering every
-- (output neuron, hidden neuron) pair in row-major order. any
-- missing entry (or no list at all) falls back to a random weight.
iwfhlntoln = function(t,olw)
for i, n in pairs(t.output_layer.neurons) do
for j=1, t.hidden_layer.count do
-- bug fix: index the flat list per neuron. the old code used
-- olw[j], so every output neuron got an identical copy of the
-- first t.hidden_layer.count weights instead of its own slice.
n.weights[j] = (olw and olw[(i-1)*t.hidden_layer.count+j] or rnd())
end
end
end,
-- feed forward: push the inputs through the hidden layer, then
-- feed the hidden layer's outputs into the output layer and
-- return whatever the output layer produces.
ff = function(t,inputs)
local hidden_out = t.hidden_layer:ff(inputs)
return t.output_layer:ff(hidden_out)
end,
-- train the network on a single (inputs, targets) pair using
-- backpropagation. ti = training inputs, to = training outputs
-- (targets, indexed per output neuron). updates all weights in
-- place by one gradient step of size t.learning_rate.
train = function(t,ti,to)
-- forward pass first, so every neuron holds its current output
t:ff(ti)
-- 1. output neuron deltas
-- pd_errors_wrt_output_neuron_total_net_input.
-- bug fix: declared local — both delta tables were accidental
-- globals, leaking state into _G and colliding between networks.
local pewontni = {}
for i,n in pairs(t.output_layer.neurons) do
-- partial derivative of total error w.r.t. this neuron's net input
pewontni[i] = n:cpewtni(to[i])
end
-- 2. hidden neuron deltas
local pewhntni = {}
for i,n in pairs(t.hidden_layer.neurons) do
-- derivative of the error w.r.t. this hidden neuron's output:
-- sum each output neuron's delta scaled by the weight it puts
-- on this hidden neuron (weight index i = this hidden neuron)
local dewhno = 0
for j,on in pairs(t.output_layer.neurons) do
dewhno += pewontni[j] * on.weights[i]
end
-- chain through the activation: delta = dE/dout * dout/dnet
pewhntni[i] = dewhno * n:cptniwi()
end
-- 3. update output neuron weights (after both delta passes, so
-- hidden deltas were computed against the pre-update weights)
for o,n in pairs(t.output_layer.neurons) do
for w_ho,w in pairs(n.weights) do
local pd_error_wrt_weight = pewontni[o] * n:cptniww(w_ho)
n.weights[w_ho] -= t.learning_rate * pd_error_wrt_weight
end
end
-- 4. update hidden neuron weights
for h,n in pairs(t.hidden_layer.neurons) do
for w_ih, w in pairs(n.weights) do
local pd_error_wrt_weight = pewhntni[h] * n:cptniww(w_ih)
n.weights[w_ih] -= t.learning_rate * pd_error_wrt_weight
end
end
end
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment