Skip to content

Instantly share code, notes, and snippets.

@LaurentBerger
Created December 5, 2017 21:44
Show Gist options
  • Save LaurentBerger/a66f42a0d378090f885b7e35eaa66c87 to your computer and use it in GitHub Desktop.
#define GENERATE_TESTDATA

// Regression test for ANN_MLP training methods (BACKPROP, RPROP, ANNEAL).
// Loads the "waveform" dataset, one-hot encodes its class labels, trains a
// small MLP from a fixed initial-weight file with each method, and compares
// both the resulting weights and the predictions against previously saved
// reference networks. With GENERATE_TESTDATA defined, the reference files
// are (re)generated instead of compared.
TEST(ML_ANN, Method)
{
    String folder = string(cvtest::TS::ptr()->get_data_path());
    String original_path = folder + "waveform.data";
    String dataname = folder + "waveform";

    Ptr<TrainData> tdata2 = TrainData::loadFromCSV(original_path, 0);
    // Check the loaded data BEFORE dereferencing it: a missing CSV file must
    // fail the test cleanly, not crash on a null/empty Ptr.
    ASSERT_FALSE(tdata2.empty()) << "Could not find test data file : " << original_path;

    // One-hot encode the class labels (3 classes) for the MLP output layer.
    Mat responses(tdata2->getResponses().rows, 3, CV_32FC1, Scalar(0));
    for (int i = 0; i < tdata2->getResponses().rows; i++)
        responses.at<float>(i, static_cast<int>(tdata2->getResponses().at<float>(i, 0))) = 1;

    Ptr<TrainData> tdata = TrainData::create(tdata2->getSamples(), ml::ROW_SAMPLE, responses);
    ASSERT_FALSE(tdata.empty()) << "Could not find test data file : " << original_path;

    // Fixed RNG seed so that train/test split and weight init are reproducible.
    RNG& rng = theRNG();
    rng.state = 1027401484159173092;
    tdata->setTrainTestSplitRatio(0.8);

    vector<int> methodType;
    methodType.push_back(ml::ANN_MLP::BACKPROP);
    methodType.push_back(ml::ANN_MLP::RPROP);
    methodType.push_back(ml::ANN_MLP::ANNEAL);
    vector<String> methodName;
    methodName.push_back("_backprop");
    methodName.push_back("_rprop");
    methodName.push_back("_anneal");

#ifdef GENERATE_TESTDATA
    // Generate a common initial-weight network so every method starts from
    // the same state (one RPROP iteration just to materialize the weights).
    rng.state = 1027401484159173092;
    Ptr<ml::ANN_MLP> xx = ml::ANN_MLP::create();
    Mat_<int> layerSizesXX(1, 3);
    layerSizesXX(0, 0) = tdata->getNVars();   // input layer: one unit per variable
    layerSizesXX(0, 1) = 30;                  // single hidden layer
    layerSizesXX(0, 2) = tdata->getResponses().cols;  // output layer: one unit per class
    xx->setLayerSizes(layerSizesXX);
    xx->setActivationFunction(ml::ANN_MLP::SIGMOID_SYM);
    xx->setTrainMethod(ml::ANN_MLP::RPROP);
    xx->setTermCriteria(TermCriteria(TermCriteria::COUNT, 1, 0.01));
    xx->train(tdata, ml::ANN_MLP::NO_OUTPUT_SCALE);
    xx->save(dataname + "_init_weight.yml");
#endif

    cout<<" BEGIN\n";
    for (size_t i = 0; i < methodType.size(); i++)
    {
        rng.state = 1027401484159173092;
        // Every method starts from the same saved initial weights.
        Ptr<ml::ANN_MLP> x = Algorithm::load<ANN_MLP>(dataname + "_init_weight.yml");
        ASSERT_FALSE(x.empty()) << "Could not load init weigths\n " ;
        x->setTrainMethod(methodType[i]);
        x->setTermCriteria(TermCriteria(TermCriteria::COUNT, 1, 0.01)); // ONLY ONE ITERATION
        for (int jj = 0; jj < 10; jj++)
        {
            // Train flags are bit flags: combine with |, not arithmetic +.
            x->train(tdata, ml::ANN_MLP::NO_OUTPUT_SCALE | ml::ANN_MLP::UPDATE_WEIGHTS);
            x->save(format("%s%s%d.yml", dataname.c_str(), methodName[i].c_str(), jj));// SAVE RESULTS AT ITERATION #jj
        }
        ASSERT_TRUE(x->isTrained()) << "Could not train networks with " << methodName[i];
#ifdef GENERATE_TESTDATA
        x->save(dataname + methodName[i] + ".yml");
#else
        // Compare against the stored reference network: weights layer by
        // layer, then predictions on the held-out test samples.
        Ptr<ml::ANN_MLP> y = Algorithm::load<ANN_MLP>(dataname + methodName[i] + ".yml");
        ASSERT_FALSE(y.empty()) << "Could not load " << dataname + methodName[i] + ".yml";
        Mat testSamples = tdata->getTestSamples();
        Mat rx, ry, dst;
        for (int j = 0; j < 4; j++)
        {
            rx = x->getWeights(j);
            ry = y->getWeights(j);
            double n = cvtest::norm(rx, ry, NORM_INF);
            EXPECT_LT(n, FLT_EPSILON) << "Weights are not equal for " << dataname + methodName[i] + ".yml and " << methodName[i] << " layer : " << j;
        }
        x->predict(testSamples, rx);
        y->predict(testSamples, ry);
        double n = cvtest::norm(rx, ry, NORM_INF);
        EXPECT_LT(n, FLT_EPSILON) << "Predict are not equal for " << dataname + methodName[i] + ".yml and " << methodName[i];
#endif
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment