@karlnapf · Created July 10, 2013
Shogun Gradient Model Selection example bugs
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* Written (W) 2013 Heiko Strathmann
*/
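/* Minimal example of gradient-based model selection for exact GP regression,
 * posted to reproduce the bugs mentioned in the gist description. */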
#include <shogun/lib/config.h>
#ifdef HAVE_EIGEN3
#include <shogun/labels/RegressionLabels.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/kernel/GaussianKernel.h>
#include <shogun/regression/GaussianProcessRegression.h>
#include <shogun/machine/gp/ExactInferenceMethod.h>
#include <shogun/machine/gp/ZeroMean.h>
#include <shogun/machine/gp/GaussianLikelihood.h>
#include <shogun/base/init.h>
#include <shogun/mathematics/Math.h> /* CMath::random, CMath::sin */
#include <shogun/evaluation/GradientEvaluation.h>
#include <shogun/modelselection/GradientModelSelection.h>
#include <shogun/modelselection/ModelSelectionParameters.h>
#include <shogun/modelselection/ParameterCombination.h>
#include <shogun/evaluation/GradientCriterion.h>
using namespace shogun;

CModelSelectionParameters* build_modelselection_tree(CInferenceMethod* inf,
		CLikelihoodModel* lik, CKernel* kernel)
{
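	/* The parameter tree mirrors the object hierarchy: the inference method
	 * owns the likelihood (with its "sigma"), the kernel (with its "width")
	 * and its own "scale"; leaf nodes define the value ranges to search. */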
	CModelSelectionParameters* root=new CModelSelectionParameters();

	CModelSelectionParameters* c1=new CModelSelectionParameters(
			"inference_method", inf);
	root->append_child(c1);

	CModelSelectionParameters* c2=new CModelSelectionParameters(
			"likelihood_model", lik);
	c1->append_child(c2);

	CModelSelectionParameters* c3=new CModelSelectionParameters("sigma");
	c2->append_child(c3);
	c3->build_values(1.0, 4.0, R_LINEAR);

	CModelSelectionParameters* c4=new CModelSelectionParameters("scale");
	c1->append_child(c4);
	c4->build_values(1.0, 1.0, R_LINEAR);

	CModelSelectionParameters* c5=new CModelSelectionParameters("kernel",
			kernel);
	c1->append_child(c5);

	CModelSelectionParameters* c6=new CModelSelectionParameters("width");
	c5->append_child(c6);
	c6->build_values(1.0, 4.0, R_LINEAR);

	return root;
}

void test()
{
	/* create some easy regression data: 1d sine wave (note that no noise is
	 * actually added to the labels) */
	index_t n=100;
	float64_t x_range=6;

	SGMatrix<float64_t> X(1, n);
	SGMatrix<float64_t> X_test(1, n);
	SGVector<float64_t> Y(n);

	for (index_t i=0; i<n; ++i)
	{
		/* random training inputs, evenly spaced test inputs */
		X[i]=CMath::random(0.0, x_range);
		X_test[i]=(float64_t)i/n*x_range;
		Y[i]=CMath::sin(X[i]);
	}
	/* shogun representation */
	CDenseFeatures<float64_t>* feat_train=new CDenseFeatures<float64_t>(X);
	CDenseFeatures<float64_t>* feat_test=new CDenseFeatures<float64_t>(X_test);
	CRegressionLabels* label_train=new CRegressionLabels(Y);

	/* specify GPR with exact inference */
	// width: 0.000002; scale: 1.400435; sigma: 95.395590
	float64_t width=0.000002;
	float64_t scale=1.400435;
	float64_t sigma=95.395590;

	CGaussianKernel* kernel=new CGaussianKernel(10, width);
	CZeroMean* mean=new CZeroMean();
	CGaussianLikelihood* lik=new CGaussianLikelihood(sigma);
	CExactInferenceMethod* inf=new CExactInferenceMethod(kernel, feat_train,
			mean, label_train, lik);
	inf->set_scale(scale);
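
	/* GP regression machine on top of the inference method; it only serves as
	 * the machine under gradient evaluation here, no predictions are made on
	 * feat_test */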
	CGaussianProcessRegression* gp=new CGaussianProcessRegression(inf);

	CModelSelectionParameters* tree=build_modelselection_tree(inf, lik, kernel);
	tree->print_tree();
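
	/* gradient evaluation: evaluates the model selection criterion and its
	 * derivatives w.r.t. the parameters in the tree, querying the inference
	 * method that is set via set_function() */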
	CGradientCriterion* crit=new CGradientCriterion();
	CGradientEvaluation* grad=new CGradientEvaluation(gp, feat_train,
			label_train, crit);
	grad->set_function(inf);

	CGradientModelSelection* grad_search=new CGradientModelSelection(tree,
			grad);
	grad->set_autolock(false);
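
	/* run the gradient search; the boolean argument requests printing of the
	 * search state; the returned combination holds the best parameters found
	 * (it is not applied to the GP here, only released again below) */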
	CParameterCombination* best_combination=grad_search->select_model(true);

	SG_UNREF(best_combination);
	SG_UNREF(grad_search);
	SG_UNREF(tree);
	SG_UNREF(feat_test);
	SG_UNREF(gp);
}

int main(int argc, char** argv)
{
	init_shogun_with_defaults();
	test();
	exit_shogun();
	return 0;
}
#else
int main(int argc, char **argv)
{
	return 0;
}
#endif