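// Background-subtraction + AKAZE feature-matching demo (OpenCV 3.x).
// Gist by @OlegJakushkin, created 2017-04-27.
//
// Pipeline: prime a MOG2 background subtractor with a clean background frame,
// run it over a directory of video frames, overlay the foreground mask and its
// large blobs on each frame, then detect AKAZE features inside the foreground
// masks of two test frames and match them with a FLANN LSH matcher.
//
// Note: PixelClassifier.h and Utils.h (ScopedTimer) appear to be project-local
// headers, not part of OpenCV; they are assumed to provide SetBack,
// equalizeIntensity, findBigBlobs, Detect and a simple RAII timer.
// tr2::sys is MSVC's pre-standard filesystem namespace, pulled in via <filesystem>.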
#include <iostream>
#include <vector>
#include <filesystem>
#include <opencv2/world.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <PixelClassifier.h>
#include <opencv2/flann/miniflann.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/video.hpp>
#include <Utils.h>
using namespace cv;
using namespace std;
PixelClassifier classifier;
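
// Tint the bright (foreground) pixels of a single-channel mask and blend the
// result over the background frame: pixels with V > 200 get hue 125 at full
// saturation, everything else stays grey.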
Mat overlay(Mat back, Mat mask) {
    Mat result = mask.clone();
    cvtColor(result, result, CV_GRAY2BGR);
    cvtColor(result, result, CV_BGR2HSV);
    for (auto j = 0; j < result.rows; ++j) {
        for (auto i = 0; i < result.cols; ++i) {
            auto & pix = result.at<Vec3b>(j, i);
            if (pix.val[2] > 200) {
                pix.val[1] = 255;
                pix.val[0] = 125;
            }
        }
    }
    cvtColor(result, result, CV_HSV2BGR);
    addWeighted(back, 0.9, result, 0.9, 0.0, result);
    return result;
}
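
// Same as overlay(back, mask) but with a second mask (e.g. the large blobs):
// pixels set in mask2 get hue 185, remaining bright mask pixels get hue 125.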
Mat overlay(Mat back, Mat mask, Mat mask2) {
    Mat result = mask.clone();
    cvtColor(result, result, CV_GRAY2BGR);
    cvtColor(result, result, CV_BGR2HSV);
    for (auto j = 0; j < result.rows; ++j) {
        for (auto i = 0; i < result.cols; ++i) {
            auto & pix = result.at<Vec3b>(j, i);
            if (mask2.at<uchar>(j, i) > 200) {
                pix.val[1] = 255;
                pix.val[0] = 185;
            } else if (pix.val[2] > 200) {
                pix.val[1] = 255;
                pix.val[0] = 125;
            }
        }
    }
    cvtColor(result, result, CV_HSV2BGR);
    addWeighted(back, 0.9, result, 0.9, 0.0, result);
    return result;
}
float alpha = 2;
float beta = 1;
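
// Simple per-channel linear contrast/brightness adjustment,
// new = saturate(alpha * old + beta). Not called from main(); kept as a helper.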
void pumpContrast(Mat & image) {
    for (int y = 0; y < image.rows; y++) {
        for (int x = 0; x < image.cols; x++) {
            for (int c = 0; c < 3; c++) {
                image.at<Vec3b>(y, x)[c] =
                    saturate_cast<uchar>(alpha * (image.at<Vec3b>(y, x)[c]) + beta);
            }
        }
    }
}
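
// Entry point. Input file names (back.png, out00441.png, out00442.png) and the
// ../video frame directory are hard-coded and resolved relative to the working
// directory.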
int main() {
    auto back = imread("./back.png", CV_LOAD_IMAGE_ANYCOLOR);
    classifier.SetBack(back);
    auto size = Size(10, 10);
    blur(back, back, size);
    back = classifier.equalizeIntensity(back);
    auto t1 = imread("./out00441.png", CV_LOAD_IMAGE_ANYCOLOR);
    auto t2 = imread("./out00442.png", CV_LOAD_IMAGE_ANYCOLOR);
    auto bname = "bgs";
    namedWindow(bname, CV_WINDOW_FREERATIO);
    resizeWindow(bname, 700, 600);
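
    // Background-subtraction stage: prime MOG2 with the clean background frame,
    // then feed every frame from ../video with a low learning rate, clean the
    // foreground mask with blur + threshold, and overlay the mask and its big
    // blobs on the current frame.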
    Mat fgm, current, bg;
    {
        ScopedTimer t("bgs");
        auto bs = createBackgroundSubtractorMOG2(300, 10, true);
        string dir = "../video";
        int counter = 0;
        tr2::sys::directory_iterator it(dir), end;
        for (; it != end; ++it) {
            if (++counter == 1) {
                // Prime the model with the clean background frame.
                bs->apply(back, fgm, 0.9);
            }
            // it->path() already carries the directory prefix.
            auto path = it->path().string();
            ScopedTimer tt(path);
            current = imread(path, CV_LOAD_IMAGE_ANYCOLOR);
            blur(current, current, size);
            current = classifier.equalizeIntensity(current);
            bs->apply(current, fgm, 0.01);
            blur(fgm, fgm, size);
            threshold(fgm, fgm, 220, 255, CV_THRESH_TOZERO);
            auto blobs = classifier.findBigBlobs(fgm, 100);
            imshow(bname, overlay(current, fgm, blobs));
            waitKey(1);
        }
        bs->getBackgroundImage(bg);
    }
cout << "fc" << fgm.channels() << " " << current.channels() << endl;
imshow("bg", bg);
auto b1 = classifier.Detect(t1);
auto b2 = classifier.Detect(t2);
Mat result;
hconcat(overlay(t1, b1), overlay(t2, b2), result);
auto name = "Tracking Test";
namedWindow( name, CV_WINDOW_FREERATIO );
resizeWindow(name, 700, 600);
//imshow(name, result);
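
    // Feature stage: AKAZE descriptors are binary (MLDB), so the FLANN matcher
    // is configured with an LSH index rather than the default KD-tree.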
    auto aka = AKAZE::create();
    FlannBasedMatcher matcher(new cv::flann::LshIndexParams(5, 24, 2));
    vector<KeyPoint> k1, k2;
    Mat d1, d2;
    vector< vector<DMatch> > nn_matches;
    {
        ScopedTimer t("features");
        aka->detectAndCompute(t1, b1, k1, d1);
        aka->detectAndCompute(t2, b2, k2, d2);
    }
    {
        ScopedTimer t("matches");
        matcher.knnMatch(d1, d2, nn_matches, 2);
    }
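
    // Draw the 2-NN matches directly; no Lowe ratio test is applied here, so
    // both nearest neighbours of each query descriptor are rendered.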
    Mat res;
    drawMatches(t1, k1, t2, k2, nn_matches, res);
    addWeighted(result, 0.9, res, 0.9, 0.0, result);
    imshow(name, result);
    waitKey();
    cin.get();
    return 0;
}