Skip to content

Instantly share code, notes, and snippets.

@sato-cloudian
Created December 17, 2015 05:46
Show Gist options
  • Select an option

  • Save sato-cloudian/899f30f4a8a05d414094 to your computer and use it in GitHub Desktop.

Select an option

Save sato-cloudian/899f30f4a8a05d414094 to your computer and use it in GitHub Desktop.
MyMLPMnistSingleLayerExample experiments
1. Don't forget to set pretrain(false) and backprop(true)
default values are:
pretrain = true
backprop = false
So, if you forget to set them explicitly, you'll see a weird result.
2. Start with the simplest network
The larger your network, the harder it becomes to see improvements.
3. "iterations" are done for each dataset
4. The nOut of the 1st layer (and the matching nIn of the 2nd layer) is what made the difference
package org.deeplearning4j.examples.mlp;
import org.deeplearning4j.datasets.iterator.DataSetIterator;
import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator;
import org.deeplearning4j.eval.Evaluation;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.api.IterationListener;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.deeplearning4j.ui.weights.HistogramIterationListener;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.SplitTestAndTrain;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* Created by agibsonccc on 9/11/14.
*
* Diff from small single layer
*/
/**
 * Single-hidden-layer MLP trained on MNIST with Deeplearning4j.
 *
 * <p>Pipeline: stream MNIST mini-batches, split each batch 80/20 into
 * train/test, fit the model on the train portion while collecting the
 * held-out features/labels, then evaluate classification accuracy on the
 * collected test data.
 */
public class MyMLPMnistSingleLayerExample {

    private static final Logger log = LoggerFactory.getLogger(MyMLPMnistSingleLayerExample.class);

    public static void main(String[] args) throws Exception {
        Nd4j.ENFORCE_NUMERICAL_STABILITY = true;

        // MNIST images are 28x28 grayscale digits, 10 output classes.
        final int numRows = 28;
        final int numColumns = 28;
        int outputNum = 10;
        int numSamples = 10000;
        int batchSize = 500;
        int iterations = 10;                         // optimizer iterations per fit() call
        int seed = 123;
        int listenerFreq = iterations / 10;          // how often listeners report
        int splitTrainNum = (int) (batchSize * .8);  // 80% of each batch goes to training

        // Held-out test data accumulated batch by batch during training.
        List<INDArray> heldOutFeatures = new ArrayList<>();
        List<INDArray> heldOutLabels = new ArrayList<>();

        log.info("Load data....");
        DataSetIterator mnistIter = new MnistDataSetIterator(batchSize, numSamples, true);

        log.info("Build model....");
        // 784 -> 400 (relu) -> 10 (softmax), trained with SGD on MCXENT loss.
        // Note: pretrain(false)/backprop(true) are set explicitly on purpose —
        // the defaults (pretrain=true, backprop=false) give wrong results here.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .iterations(iterations)
            //.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
            .learningRate(1e-1)
            //.momentum(0.5)
            //.momentumAfter(Collections.singletonMap(3, 0.9))
            //.useDropConnect(true)
            .list(2)
            .layer(0, new DenseLayer.Builder()
                .nIn(numRows * numColumns) // 28*28=784
                .nOut(400)
                .activation("relu")
                .weightInit(WeightInit.XAVIER)
                .build())
            .layer(1, new OutputLayer.Builder(LossFunction.MCXENT)
                .nIn(400)
                .nOut(outputNum)
                .activation("softmax")
                .weightInit(WeightInit.XAVIER)
                .updater(Updater.SGD)
                .build())
            .backprop(true)
            .pretrain(false)
            .build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(listenerFreq), new HistogramIterationListener(listenerFreq)));

        log.info("Train model....");
        while (mnistIter.hasNext()) {
            DataSet batch = mnistIter.next();
            // Deterministic shuffle+split of this batch into train/test portions.
            SplitTestAndTrain split = batch.splitTestAndTrain(splitTrainNum, new Random(seed));
            heldOutFeatures.add(split.getTest().getFeatureMatrix());
            heldOutLabels.add(split.getTest().getLabels());
            model.fit(split.getTrain());
        }

        log.info("Evaluate model....");
        Evaluation eval = new Evaluation(outputNum);
        for (int idx = 0; idx < heldOutFeatures.size(); idx++) {
            INDArray prediction = model.output(heldOutFeatures.get(idx));
            eval.eval(heldOutLabels.get(idx), prediction);
        }
        log.info(eval.stats());
        log.info("****************Example finished********************");
    }
}
784 => 500 => 10
==========================Scores========================================
Accuracy: 0.906
Precision: 0.9067
Recall: 0.9041
F1 Score: 0.9053704979400815
===========================================================================
784 => 400 => 10
==========================Scores========================================
Accuracy: 0.9065
Precision: 0.9069
Recall: 0.9047
F1 Score: 0.905752233788593
===========================================================================
784 => 300 => 10
==========================Scores========================================
Accuracy: 0.9045
Precision: 0.9043
Recall: 0.9021
F1 Score: 0.9031987332520014
===========================================================================
784 => 100 => 10
==========================Scores========================================
Accuracy: 0.8945
Precision: 0.8942
Recall: 0.8917
F1 Score: 0.8929378519501043
===========================================================================
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment