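# Single-layer perceptron in Julia: trains on AND and XOR with ±1
# inputs/targets, then measures how many random patterns one perceptron
# can memorize.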
using Plots

# All four binary input patterns as the rows of a 4×2 matrix
x = reshape(collect(Iterators.flatten(Iterators.product(0:1, 0:1))), (2, 4))'
# Targets in {-1, +1}: AND and XOR of the two input bits
y_and = float(mapslices(r -> r[1] & r[2], x; dims=2)) .* 2 .- 1
y_xor = float(mapslices(r -> r[1] ⊻ r[2], x; dims=2)) .* 2 .- 1
# Prepend a bias column of ones, then map inputs from {0, 1} to {-1, +1}
x = hcat(ones(4), float(x)) .* 2 .- 1
# Per-component weight update: zero when prediction matches target
ΔW(x, pred, target, lr) = pred == target ? 0 : 2 * target * x * lr
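# This is the classic perceptron rule W ← W + lr * (target - pred) * x:
# with target, pred ∈ {-1, +1}, (target - pred) is 0 on agreement and
# 2 * target on disagreement, matching the expression above.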
struct Perceptron{T<:Real}
    W::Matrix{T}
end
# Forward pass: threshold activation mapping Bool to {-1, +1}
(model::Perceptron)(X::AbstractVector{<:Real}) = ((model.W * X) .> 0) .* 2 .- 1
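# Example forward pass with hypothetical weights W = [0 1 1]:
# for x = [1, 1, 1] (bias, x1, x2), W * x = [2] and 2 > 0, so the output is [1].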
function train(model::Perceptron, X::AbstractMatrix{<:Real}, Y::AbstractMatrix{<:Real}, nepochs=1, lr=1)
    for e = 1:nepochs
        # iterate over training examples
        for n in 1:size(X, 1)
            x = X[n, :]
            y = Y[n, :]
            pred = model(x)
            # iterate over output units
            for (i, p) in enumerate(pred)
                # update the i-th weight vector on misclassified components
                model.W[i, :] .+= ΔW.(x, p, y[i], lr)
            end
        end
    end
end
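# Note: train mutates model.W in place; the struct binding is immutable,
# but the array it holds is not.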
##
model = Perceptron(rand(1, 3))
train(model, x, y_and, 10, 1)
# AND is linearly separable, so a few epochs suffice
print("AND succeeded: $(all(y_and .== mapslices(model, x; dims=2)))")
##
model = Perceptron(rand(1, 3))
train(model, x, y_xor, 100, 1)
print("XOR succeeded: $(all(y_xor .== mapslices(model, x; dims=2)))")
reps = 25
maxpatterns = 100
successes = zeros(maxpatterns)
for r = 1:reps
    for i = 1:maxpatterns
        # i random ±1 patterns of dimension 10, with random ±1 targets
        X = rand(0:1, i, 10) .* 2 .- 1
        Y = rand(0:1, i, 1) .* 2 .- 1
        model = Perceptron(rand(1, 10))
        train(model, X, Y, 100, 1)
        if all(Y .== mapslices(model, X; dims=2))
            successes[i] += 1
        end
    end
end
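# Expected shape: by Cover's counting argument, a perceptron with N weights
# stores random ±1 patterns reliably up to roughly 2N of them, so with
# N = 10 inputs here recall should collapse around ~20 patterns.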
plot(successes ./ reps .* 100, legend=false)
ylabel!("Recall probability (%)")
xlabel!("Number of patterns")
title!("Success rate for single binary perceptron with 25 repetitions")
savefig("recall_prob.png")