//
// inside the main class
//
public void trainNetwork()
{
    for( int i = 0; i < 1000000; i++)
    {
        // draw a random XOR training pair: inputs a, b and target c = a ^ b
        int r = (int) (Math.random() * 100000) % 4;
        int a = r % 2;
        int b = (r >> 1) % 2;
        int c = a ^ b;

        // set the inputs before the forward pass, otherwise output()
        // still reflects the previous sample
        input1.setInput( a);
        input2.setInput( b);

        double y = output.output();     // forward pass
        double dErr = (y - c);          // dE/dy for E = 0.5*(y - c)*(y - c)

        output.backPropagate( dErr );   // backward pass
    }
}
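
The quantity handed to backPropagate is the derivative of the squared-error loss with respect to the network output. A minimal sketch of the intended math, inferred from the comment on dErr rather than stated explicitly in the gist:

\[
E = \tfrac{1}{2}(y - c)^2
\quad\Longrightarrow\quad
\frac{\partial E}{\partial y} = y - c = \texttt{dErr}
\]
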
//
// inside the neuron class
//
public void backPropagate( double dErr) // dErr = dE/dy for this neuron's output y
{
    if( isInputNeuron)
    {
        return; // input neurons have no incoming weights to adjust
    }

    double hj = inputSum();   // pre-activation: weighted sum of inputs
    double yj = output();     // activation: sigmoid(hj)

    // local error term: dE/dhj = sigmoid'(hj) * dE/dyj (same for every incoming weight)
    double dErrj = activationDerivative( hj) * dErr;

    for( int i = 0; i < inputSynapses.size(); i++)
    {
        double xi = inputSynapses.get(i).output();

        // pass dE/dxi = dErrj * w_i upstream, using the weight
        // before it is updated; beware, recursion.
        inputSynapses.get(i).backPropagate( dErrj * weights[i] );

        // gradient-descent step: dE/dw_i = dErrj * xi
        weights[i] -= learningRate * dErrj * xi;
    }
}
private double activationDerivative( double x) // sigmoid derivative: s(x)*(1 - s(x))
{
    return activation(x)*(1.0 - activation(x));
}

private double activation( double arg) // sigmoid
{
    return 1.0/(1.0 + Math.exp( -arg));
}
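
For a sigmoid neuron with pre-activation h_j (the weighted sum of its inputs) and output y_j = sigmoid(h_j), the two lines inside the loop of backPropagate match the standard chain-rule derivation. A sketch of that derivation, written to line up with the variable names in the code:

\[
\texttt{dErrj} = \frac{\partial E}{\partial h_j} = \sigma'(h_j)\,\frac{\partial E}{\partial y_j},
\qquad
\frac{\partial E}{\partial w_i} = \texttt{dErrj}\,x_i,
\qquad
\frac{\partial E}{\partial x_i} = \texttt{dErrj}\,w_i
\]

so weights[i] -= learningRate*dErrj*xi is a plain gradient-descent step on each incoming weight, and dErrj*weights[i] is the error signal handed to the upstream neuron through synapse i.

A quick way to sanity-check the result is to evaluate all four XOR cases after trainNetwork() returns. This sketch only reuses the setInput()/output() calls that already appear above; the input1, input2 and output fields are assumed to be the same ones used in the training loop:

// hypothetical check, run after trainNetwork(); assumes the same
// input1, input2 and output fields as in the training loop
for (int a = 0; a <= 1; a++)
{
    for (int b = 0; b <= 1; b++)
    {
        input1.setInput(a);
        input2.setInput(b);
        System.out.println(a + " XOR " + b + " -> " + output.output());
    }
}
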