Skip to content

Instantly share code, notes, and snippets.

@simogasp
Last active August 14, 2019 10:18
Show Gist options
  • Save simogasp/cce46fed42faece0c6577370e7ed9d14 to your computer and use it in GitHub Desktop.
Save simogasp/cce46fed42faece0c6577370e7ed9d14 to your computer and use it in GitHub Desktop.
How to estimate the relative rotation between two views related by a pure rotation, via homography estimation and decomposition
// This file is part of the AliceVision project.
// Copyright (c) 2019 AliceVision contributors.
// This Source Code Form is subject to the terms of the Mozilla Public License,
// v. 2.0. If a copy of the MPL was not distributed with this file,
// You can obtain one at https://mozilla.org/MPL/2.0/.
#include <aliceVision/numeric/numeric.hpp>
#include <aliceVision/robustEstimation/ACRansacKernelAdaptator.hpp>
#include <aliceVision/robustEstimation/ACRansac.hpp>
#include <aliceVision/multiview/homographyKernelSolver.hpp>
#include <aliceVision/multiview/conditioning.hpp>
#include <aliceVision/system/Logger.hpp>

#include <cmath>
#include <limits>
#include <utility>
#include <vector>
/**
* @brief Decompose a homography given known calibration matrices, assuming a pure rotation between the two views.
* It is supposed that \f$ x_2 \sim H x_1 \f$ with \f$ H = K_2 * R * K_1^{-1} \f$
* @param[in] homography 3x3 homography matrix H.
* @param[in] K1 3x3 calibration matrix of the first view.
* @param[in] K2 3x3 calibration matrix of the second view.
* @return The 3x3 rotation matrix corresponding to the pure rotation between the views.
*/
/**
 * @brief Decompose a homography given known calibration matrices, assuming a pure rotation
 * between the two views. It is supposed that \f$ x_2 \sim H x_1 \f$ with
 * \f$ H = K_2 * R * K_1^{-1} \f$.
 * @param[in] homography 3x3 homography matrix H.
 * @param[in] K1 3x3 calibration matrix of the first view.
 * @param[in] K2 3x3 calibration matrix of the second view.
 * @return The 3x3 rotation matrix corresponding to the pure rotation between the views.
 */
aliceVision::Mat3 decomposePureRotationHomography(const aliceVision::Mat3 &homography, const aliceVision::Mat3 &K1,
                                                  const aliceVision::Mat3 &K2)
{
    // G is the "calibrated" homography inv(K2) * H * K1
    const aliceVision::Mat3 G = K2.inverse() * homography * K1;
    // Compute the scale factor lambda that makes det(lambda * G) = 1.
    // BUGFIX: the previous code computed std::pow(1 / det, 1 / 3) where "1 / 3" is
    // INTEGER division (== 0), so lambda was always 1 and the normalization was a
    // no-op. std::cbrt is used instead of std::pow(x, 1.0 / 3.0) because it is also
    // well-defined for a negative determinant (pow would return NaN there).
    const double lambda = std::cbrt(1.0 / G.determinant());
    const aliceVision::Mat3 scaled = lambda * G;
    //@fixme find possible bad cases?
    // Project the scaled matrix onto SO(3): the closest rotation matrix in Frobenius
    // norm to a matrix with SVD U*S*V^T is U*V^T (orthogonal Procrustes).
    Eigen::JacobiSVD<aliceVision::Mat3> usv(scaled, Eigen::ComputeFullU | Eigen::ComputeFullV);
    return usv.matrixU() * usv.matrixV().transpose();
}
/**
* @brief Estimate the homography between two views using corresponding points such that \f$ x_2 \sim H x_1 \f$
* @param[in] x1 The points on the first image.
* @param[in] x2 The corresponding points on the second image.
* @param[in] imgSize1 The size of the first image.
* @param[in] imgSize2 The size of the second image.
* @param[out] H The estimated homography.
* @param[out] vec_inliers The inliers satisfying the homography as a list of indices.
* @return the status of the estimation.
*/
/**
 * @brief Estimate the homography between two views using corresponding points such that
 * \f$ x_2 \sim H x_1 \f$, via AC-RANSAC (a-contrario RANSAC).
 * @param[in] x1 The points on the first image (2xN).
 * @param[in] x2 The corresponding points on the second image (2xN).
 * @param[in] imgSize1 The size (width, height) of the first image.
 * @param[in] imgSize2 The size (width, height) of the second image.
 * @param[out] H The estimated homography.
 * @param[out] vec_inliers The inliers satisfying the homography as a list of indices.
 * @return the status of the estimation (validity and strength of the inlier support).
 */
aliceVision::EstimationStatus robustHomographyEstimationAC(const aliceVision::Mat2X &x1,
                                                           const aliceVision::Mat2X &x2,
                                                           const std::pair<std::size_t, std::size_t> &imgSize1,
                                                           const std::pair<std::size_t, std::size_t> &imgSize2,
                                                           aliceVision::Mat3 &H,
                                                           std::vector<std::size_t> &vec_inliers)
{
    using KernelType = aliceVision::robustEstimation::ACKernelAdaptor<
            aliceVision::homography::kernel::FourPointSolver,
            aliceVision::homography::kernel::AsymmetricError,
            aliceVision::UnnormalizerI,
            aliceVision::Mat3>;

    KernelType kernel(x1, imgSize1.first, imgSize1.second,
                      x2, imgSize2.first, imgSize2.second,
                      false); // configure as point to point error model.

    // Run AC-RANSAC with an unbounded initial residual tolerance and at most 1024
    // iterations. The returned (threshold, NFA) pair is deliberately not needed here
    // (the previous revision stored it in an unused local), only H and the inliers are.
    aliceVision::robustEstimation::ACRANSAC(kernel, vec_inliers,
                                            1024,
                                            &H,
                                            std::numeric_limits<double>::infinity());

    // The estimation is valid as soon as at least one inlier supports it.
    const bool valid{!vec_inliers.empty()};
    //@fixme: arbitrary strong-support heuristic (2.5x the minimal sample size).
    const bool hasStrongSupport{vec_inliers.size() > KernelType::MINIMUM_SAMPLES * 2.5};
    return {valid, hasStrongSupport};
}
/**
* @brief A struct containing the information of the relative rotation.
*/
struct RelativeRotationInfo
{
/**
 * @brief Default constructor.
 */
RelativeRotationInfo() = default;
/// the estimated homography H such that x2 ~ H x1.
aliceVision::Mat3 _homography{};
/// the relative rotation decomposed from the homography.
aliceVision::Mat3 _relativeRotation{};
/// indices of the point correspondences that are inliers of the homography.
std::vector<size_t> _inliers{};
/// initial residual threshold for the acransac process (infinity = let acransac estimate it).
double _initialResidualTolerance{std::numeric_limits<double>::infinity()};
/// the residual threshold found by acransac (infinity until an estimation succeeds).
double _foundResidualPrecision{std::numeric_limits<double>::infinity()};
};
/**
* @brief Estimate the relative rotation between two views related by a pure rotation.
* @param[in] K1 3x3 calibration matrix of the first view.
* @param[in] K2 3x3 calibration matrix of the second view.
* @param[in] x1 The points on the first image.
* @param[in] x2 The corresponding points on the second image.
* @param[in] imgSize1 The size of the first image.
* @param[in] imgSize2 The size of the second image.
* @param[out] relativeRotationInfo Contains the result of the estimation.
* @param[in] maxIterationCount Max number of iteration for the ransac process.
* @return true if a homography has been estimated.
*/
/**
 * @brief Estimate the relative rotation between two views related by a pure rotation.
 * @param[in] K1 3x3 calibration matrix of the first view.
 * @param[in] K2 3x3 calibration matrix of the second view.
 * @param[in] x1 The points on the first image.
 * @param[in] x2 The corresponding points on the second image.
 * @param[in] imgSize1 The size of the first image.
 * @param[in] imgSize2 The size of the second image.
 * @param[out] relativeRotationInfo Contains the result of the estimation.
 * @param[in] maxIterationCount Max number of iteration for the ransac process.
 * @return true if a homography has been estimated with strong support.
 */
bool robustRelativeRotation(const aliceVision::Mat3 &K1,
                            const aliceVision::Mat3 &K2,
                            const aliceVision::Mat2X &x1,
                            const aliceVision::Mat2X &x2,
                            const std::pair<size_t, size_t> &imgSize1,
                            const std::pair<size_t, size_t> &imgSize2,
                            RelativeRotationInfo &relativeRotationInfo,
                            const size_t maxIterationCount)
{
    // Estimate the homography between the two views; inliers are stored directly
    // in the output struct. (The previous revision also declared an unused local
    // vec_inliers here — removed.)
    const auto status = robustHomographyEstimationAC(x1, x2, imgSize1, imgSize2,
                                                     relativeRotationInfo._homography,
                                                     relativeRotationInfo._inliers);

    // BUGFIX: the previous condition (!isValid && !hasStrongSupport) made
    // hasStrongSupport dead code, since strong support implies validity and the
    // conjunction therefore reduced to !isValid. Require BOTH a valid estimation
    // and strong inlier support before trusting the decomposition.
    if (!status.isValid || !status.hasStrongSupport)
    {
        return false;
    }

    // Decompose the homography into the pure rotation between the views.
    relativeRotationInfo._relativeRotation = decomposePureRotationHomography(relativeRotationInfo._homography, K1, K2);
    ALICEVISION_LOG_INFO("Found homography H:\n" << relativeRotationInfo._homography);
    ALICEVISION_LOG_INFO("Homography H decomposes to rotation R:\n" << relativeRotationInfo._relativeRotation);
    return true;
}
int main(int argc, char *argv[])
{
{
using namespace aliceVision;
// rotation between the two views
const Mat3 rotation = aliceVision::rotationXYZ(0.01, -0.001, -0.2);
ALICEVISION_LOG_INFO("Ground truth rotation:\n" << rotation);
// some random 3D points on the unit sphere
const Mat3X pts3d = Mat3X::Random(3, 20).colwise().normalized();
ALICEVISION_LOG_INFO("points 3d:\n" << pts3d);
// calibration 1
Mat3 K1;
K1 << 1200, 0, 960,
0, 1200, 540,
0, 0, 1;
ALICEVISION_LOG_INFO("K1:\n" << K1);
// calibration 2
Mat3 K2;
K2 << 1100, 0, 960,
0, 1100, 540,
0, 0, 1;
ALICEVISION_LOG_INFO("K2:\n" << K2);
// 2d points on each side as projection of the 3D points
const Mat2X pts1 = (K1 * pts3d).colwise().hnormalized();
// ALICEVISION_LOG_INFO("pts1:\n" << pts1);
const Mat2X pts2 = (K2 * rotation * pts3d).colwise().hnormalized();
// ALICEVISION_LOG_INFO("pts2:\n" << pts2);
// test the uncalibrated version, just pass the 2d points and compute R
RelativeRotationInfo rotationInfo{};
ALICEVISION_LOG_INFO("\n\n###########################################\nUncalibrated");
robustRelativeRotation(K1, K2, pts1, pts2,
std::make_pair<std::size_t, std::size_t>(1920, 1080),
std::make_pair<std::size_t, std::size_t>(1920, 1080),
rotationInfo,1000);
ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo._relativeRotation);
// test the calibrated version, compute normalized points for each view (by multiplying by the inverse of K) and estimate H and R
ALICEVISION_LOG_INFO("\n\n###########################################\nCalibrated");
robustRelativeRotation(Mat3::Identity(), Mat3::Identity(),
(K1.inverse()*pts1.colwise().homogeneous()).colwise().hnormalized(),
(K2.inverse()*pts2.colwise().homogeneous()).colwise().hnormalized(),
std::make_pair<std::size_t, std::size_t>(1920, 1080),
std::make_pair<std::size_t, std::size_t>(1920, 1080),
rotationInfo,1000);
ALICEVISION_LOG_INFO("rotation.inverse() * rotationInfo._relativeRotation:\n" << rotation.inverse() * rotationInfo._relativeRotation);
}
return EXIT_SUCCESS;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment