Created
December 2, 2015 15:19
-
-
Save chiral/f647b63aca50af6214ea to your computer and use it in GitHub Desktop.
Example of a trivial optimization problem on the 2-D unit circle, for comparing ceres-solver and TensorFlow.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#include <cmath>
#include <iostream>

#include "ceres/ceres.h"
#include "glog/logging.h"

using ceres::AutoDiffCostFunction;
using ceres::CostFunction;
using ceres::Problem;
using ceres::Solve;
using ceres::Solver;
// Residual functor for the circle problem: r0 = 1 - cos(x), r1 = 1 - sin(x).
// The squared-residual sum is minimized at x = pi/4, where cos and sin are
// jointly closest to 1. Templated on T so Ceres can auto-differentiate with
// Jet types as well as evaluate with plain double.
struct CostFunctor {
  template <typename T>
  bool operator()(const T* const x, T* residual) const {
    const T angle = x[0];
    const T one = T(1.0);
    residual[0] = one - cos(angle);
    residual[1] = one - sin(angle);
    return true;  // evaluation always succeeds
  }
};
// Builds a one-parameter Ceres problem with the two residuals from
// CostFunctor, solves it, and prints the optimized angle next to the
// analytic answer pi/4.
int main(int argc, char** argv) {
  google::InitGoogleLogging(argv[0]);
  // Analytic minimizer of (1-cos x)^2 + (1-sin x)^2 is x = pi/4.
  const double pi = 3.14159265358979323846;
  double x[1] = {0};  // initial guess: x = 0
  Problem problem;
  // 2 residuals, 1 parameter block of size 1; the problem takes
  // ownership of cost_function (and of the CostFunctor inside it).
  CostFunction* cost_function =
      new AutoDiffCostFunction<CostFunctor, 2, 1>(new CostFunctor);
  problem.AddResidualBlock(cost_function, nullptr, x);  // nullptr: no loss function
  Solver::Options options;
  options.minimizer_progress_to_stdout = true;
  Solver::Summary summary;
  Solve(options, &problem, &summary);
  std::cout << summary.BriefReport() << "\n";
  std::cout << "opt_x:" << x[0] << "\n";
  std::cout << "ans_x:" << pi / 4 << "\n";
  return 0;
}
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import tensorflow as tf | |
import numpy as np | |
o = np.array([1,1]) | |
theta = tf.Variable(tf.zeros([1])) | |
xy = tf.concat(0,[tf.cos(theta),tf.sin(theta)]) | |
loss = tf.reduce_mean(tf.square(xy - o)) | |
optimizer = tf.train.AdagradOptimizer(0.5) | |
train = optimizer.minimize(loss) | |
init = tf.initialize_all_variables() | |
sess = tf.Session() | |
sess.run(init) | |
for step in xrange(201): | |
sess.run(train) | |
if step % 20 == 0: | |
print step, sess.run(theta) |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment