Skip to content

Instantly share code, notes, and snippets.

@HaruhiroTakahashi
Created September 21, 2017 08:56
Show Gist options
  • Save HaruhiroTakahashi/1102a2ba869bba7a45c5f668b4b69799 to your computer and use it in GitHub Desktop.
OpenCVの基本的なメソッドまとめ
#pragma once
#include "OCV.h"
#include <random>
using namespace::std;
using namespace::cv;
using cv::UMat;
IAffine::~IAffine()
{
	// Trace message so interface destruction is visible on the console.
	std::cout << "IAffine破棄されたよ" << std::endl;
}
UMat OCV::Frip()
{
	// Mirror the image around both axes.
	// flipCode semantics: 0 = around x-axis, >0 = around y-axis, <0 = both.
	const int flipCode = -1; // was atoi("-1"); a literal avoids the pointless runtime string parse
	flip(src, dst, flipCode);
	return dst;
}
UMat OCV::Scaling()
{
	// Resize to 1.5x the width and 0.5x the height of the source.
	const float scaleW = 1.5f; // was atof("1.5"); literals avoid the runtime parse
	const float scaleH = 0.5f;
	const int width = static_cast<int>(src.cols * scaleW);
	const int height = static_cast<int>(src.rows * scaleH); // fixed local-name typo "hight"
	resize(src, dst, Size(width, height));
	return dst;
}
UMat OCV::Rotate()
{
	// Rotate 23.2 degrees counter-clockwise around the image center, scale 1.0,
	// replicating border pixels.
	const float angle = 23.2f; // was atof("23.2")
	const Point2f center(static_cast<float>(src.cols / 2), static_cast<float>(src.rows / 2));
	getRotationMatrix2D(center, angle, 1.0).copyTo(affintrance);
	// BUG FIX: the output size was dst.size(), which is empty before dst is
	// first written and otherwise depends on whatever a previous call left
	// behind. Use src.size() so the result always matches the input image.
	warpAffine(src, dst, affintrance, src.size(), INTER_CUBIC, BORDER_REPLICATE);
	return dst;
}
UMat OCV::RotateCont()
{
	// Show the image rotated by 0/90/180/270 degrees, one second per step.
	// Any keypress aborts the animation early.
	const Point2f center(static_cast<float>(src.cols / 2), static_cast<float>(src.rows / 2));
	namedWindow("test", WINDOW_AUTOSIZE); // WINDOW_AUTOSIZE replaces the legacy CV_WINDOW_AUTOSIZE
	for (float angle = 0.0f; angle < 360.0f; angle += 90.0f)
	{
		Mat rotation = getRotationMatrix2D(center, angle, 1.0);
		warpAffine(src, dst, rotation, src.size(), INTER_CUBIC);
		imshow("test", dst);
		// waitKey replaces the legacy C-API cvWaitKey.
		// Reference: https://stackoverflow.com/questions/41750171/opencv-waitkey-function-always-returns-255-on-mac
		if (waitKey(1000) >= 0) { break; }
	}
	return dst;
}
UMat OCV::Perspective()
{
	// Warp the central quarter rectangle of the image with one of three
	// randomly chosen perspective distortions.
	Point2f dstPoint[4];
	int xMergin, yMergin;
	int x0 = src.cols / 4;
	int x1 = (src.cols / 4) * 3;
	int y0 = src.rows / 4;
	int y1 = (src.rows / 4) * 3;
	// Quadrilateral corners in order: top-left, bottom-left, bottom-right, top-right.
	Point2f srcPoint[] = {
		Point(x0, y0),
		Point(x0, y1),
		Point(x1, y1),
		Point(x1, y0)
	};
	// <random>: mt19937 seeded from random_device; dist(mt) yields 0..2 uniformly.
	// (Reference: http://qiita.com/_EnumHack/items/25fc998873d1bc7d2276)
	mt19937 mt{ random_device{}() };
	uniform_int_distribution<int> dist(0, 2);
	switch (dist(mt))
	{
	// "tilted backwards" perspective
	case 0:
		xMergin = src.cols / 10;
		yMergin = src.rows / 10;
		dstPoint[0] = Point(x0 + xMergin, y0 + yMergin);
		dstPoint[1] = srcPoint[1];
		dstPoint[2] = srcPoint[2];
		dstPoint[3] = Point(x1 - xMergin, y0 + yMergin);
		break;
	// "opened door" perspective
	case 1:
		xMergin = src.cols / 8;
		yMergin = src.rows / 8;
		dstPoint[0] = srcPoint[0];
		dstPoint[1] = srcPoint[1];
		dstPoint[2] = Point(x1 - xMergin, y1 - yMergin);
		dstPoint[3] = Point(x1 - xMergin, y0 + yMergin);
		break;
	// diagonal stretch
	case 2:
		xMergin = src.cols / 6;
		yMergin = src.rows / 6;
		dstPoint[0] = Point(x0 + xMergin, y0 + yMergin);
		dstPoint[1] = srcPoint[1];
		dstPoint[2] = Point(x1 - xMergin, y1 - yMergin);
		dstPoint[3] = srcPoint[3];
		break;
	default:
		// Unreachable with dist(0, 2), but fall back to the identity mapping
		// so dstPoint is never read uninitialized (the original left it as
		// garbage on this path).
		cerr << "設定されていない値が入力されました。" << endl;
		for (int i = 0; i < 4; i++) dstPoint[i] = srcPoint[i];
		break;
	}
	// BUG FIX: the original wrote "dst = NULL" to empty the output; release()
	// states the intent directly and is well-defined for UMat.
	dst.release();
	Mat perspectiveMmat = getPerspectiveTransform(srcPoint, dstPoint);
	warpPerspective(src, dst, perspectiveMmat, src.size(), INTER_CUBIC);
	return dst;
}
#pragma once
#include "OCV.h"
#include <random>
using namespace::std;
using namespace::cv;
using cv::UMat;
IColor::~IColor()
{
	// Trace message so interface destruction is visible on the console.
	std::cout << "IColor破棄されたよ" << std::endl;
}
UMat OCV::toGrayscale()
{
	// Convert to a single-channel grayscale image.
	// BUG FIX: imread (used by main) loads pixels in BGR order, so the correct
	// conversion code is COLOR_BGR2GRAY; COLOR_RGB2GRAY swaps the red/blue
	// luma weights and produces a subtly wrong grayscale.
	cvtColor(src, dst, COLOR_BGR2GRAY);
	return dst;
}
UMat OCV::toBrightnessSmoothing()
{
	// Histogram-equalize the brightness. equalizeHist only accepts
	// single-channel 8-bit images, so convert to grayscale first.
	// BUG FIX: the original wrote the grayscale image back into the member
	// `src`, permanently mutating the stored source for every later call.
	// Work on a local instead. Also BGR2GRAY to match imread's channel order.
	UMat gray;
	cvtColor(src, gray, COLOR_BGR2GRAY);
	equalizeHist(gray, dst);
	return dst;
}
UMat OCV::toThresholding()
{
	// Equalize the grayscale image, then threshold it with a randomly
	// selected threshold type (thresh=60, maxval=180).
	double thresh = 60.0, maxval = 180.0;
	int type = THRESH_BINARY;
	// BUG FIX: work on a local grayscale copy instead of overwriting the
	// member `src` (the original destroyed the stored color image).
	UMat gray;
	cvtColor(src, gray, COLOR_BGR2GRAY); // imread loads BGR, not RGB
	equalizeHist(gray, gray);
	mt19937 mt{ random_device{}() };
	uniform_int_distribution<int> dist(0, 4); // covers every case below
	switch (dist(mt))
	{
	case 0: type = THRESH_BINARY; break;
	case 1: type = THRESH_BINARY_INV; break;
	case 2: type = THRESH_TRUNC; break;
	case 3: type = THRESH_TOZERO; break;
	case 4: type = THRESH_TOZERO_INV; break;
	}
	threshold(gray, dst, thresh, maxval, type);
	return dst;
}
#pragma once
#pragma warning(disable:4819) // silence MSVC's "save as Unicode" warning triggered by OpenCV headers
#include <opencv2/opencv.hpp>
#pragma warning(default:4819)
#define DEBUG
#ifdef DEBUG
#pragma comment(lib, "opencv_world320d.lib") // debug build of OpenCV 3.2
#else
#pragma comment(lib, "opencv_world320.lib") // release build of OpenCV 3.2
#endif
#pragma once
#include "OCV.h"
#include <random>
using namespace::std;
using namespace::cv;
using cv::UMat;
IFilter::~IFilter()
{
	// Trace message so interface destruction is visible on the console.
	std::cout << "IFilter破棄されたよ" << std::endl;
}
UMat OCV::FripBit()
{
	// Produce the negative image: every bit of every pixel inverted.
	cv::bitwise_not(src, dst);
	return dst;
}
UMat OCV::toBlur()
{
	// Box (averaging) blur with an 11x11 kernel.
	const int ksize = 11;
	const Size kernel(ksize, ksize);
	blur(src, dst, kernel);
	return dst;
}
UMat OCV::toGaussian()
{
	// Gaussian blur: 11x11 kernel with sigma 10 in both directions.
	const Size kernel(11, 11);
	const double sigmaX = 10.0;
	const double sigmaY = 10.0;
	GaussianBlur(src, dst, kernel, sigmaX, sigmaY);
	return dst;
}
UMat OCV::toLaplacian()
{
	// Laplacian edge detector; ddepth 0 selects CV_8U output (same as the
	// 8-bit source loaded by imread).
	Laplacian(src, dst, 0);
	return dst;
}
UMat OCV::toSobel()
{
	// Sobel derivative in the y direction only (xorder=0, yorder=1);
	// ddepth -1 keeps the source depth.
	Sobel(src, dst, -1, 0, 1);
	return dst;
}
UMat OCV::toCanny()
{
	// Canny edge detection with hysteresis thresholds 40/200.
	// BUG FIX: the original converted to grayscale into dst but then ran
	// Canny on the color `src`, discarding the conversion entirely.
	// Convert into a local and feed that to Canny (which expects a
	// single-channel 8-bit input).
	double threshold1 = 40.0, threshold2 = 200.0;
	UMat gray;
	cvtColor(src, gray, COLOR_BGR2GRAY); // imread loads BGR, not RGB
	Canny(gray, dst, threshold1, threshold2);
	return dst;
}
UMat OCV::ErosionOrDilation()
{
	// Randomly apply either erosion or dilation with the default 3x3 kernel
	// (an empty Mat() means "use the default structuring element").
	mt19937 engine{ random_device{}() };
	uniform_int_distribution<int> coin(0, 1);
	if (coin(engine) == 0)
		erode(src, dst, Mat());
	else
		dilate(src, dst, Mat());
	return dst;
}
#pragma once
#include "OCV.h"
using namespace::std;
using namespace::cv;
using cv::UMat;
IGraphics::~IGraphics()
{
	// Trace message so interface destruction is visible on the console.
	std::cout << "IGraphics破棄されたよ" << std::endl;
}
UMat OCV::drawCircle()
{
	// Draw a blue (BGR 255,0,0) circle of radius 50 at (200,200).
	// BUG FIX: "dst = src" makes dst share pixel data with src, so drawing
	// on dst also scribbled over the stored source image; clone() makes a
	// deep copy.
	dst = src.clone();
	circle(dst, Point(200, 200), 50, Scalar(255, 0, 0), 2);
	return dst;
}
UMat OCV::drawLine()
{
	// Draw the two diagonals of the central half-size rectangle.
	// BUG FIX: deep-copy the source; the original's (duplicated) shallow
	// "dst = src" assignments meant every draw also modified the stored
	// source image.
	dst = src.clone();
	int x0 = src.cols / 4;
	int x1 = src.cols * 3 / 4;
	int y0 = src.rows / 4;
	int y1 = src.rows * 3 / 4;
	Point p0(x0, y0);
	Point p1(x1, y1);
	line(dst, p0, p1, Scalar(0, 0, 255), 3, 4); // red diagonal
	p0.y = y1;
	p1.y = y0;
	line(dst, p0, p1, Scalar(255, 0, 0), 3, 4); // blue diagonal
	return dst;
}
UMat OCV::drawEcllipse()
{
	// Draw two concentric ellipses; the second is slightly smaller and tilted.
	// BUG FIX: clone instead of the shallow "dst = src" assignment so drawing
	// does not also modify the stored source image.
	dst = src.clone();
	Point center(src.cols / 2, src.rows / 2);
	Size sz(src.cols / 2, src.rows / 2);
	ellipse(dst, center, sz, 0, 0, 360, Scalar(255, 0, 0), 3, 4);
	sz.width -= 20;
	sz.height -= 50;
	ellipse(dst, center, sz, 15, 10, 360, Scalar(255, 255, 0), 2, 4);
	return dst;
}
UMat OCV::drawRect()
{
	// Draw two nested rectangles (outer green, inner yellow).
	// BUG FIX: deep copy; "dst = src" shares pixels with src, so the
	// rectangles were also drawn onto the stored source image.
	dst = src.clone();
	Point p0(src.cols / 8, src.rows / 8);
	Point p1(src.cols * 7 / 8, src.rows * 7 / 8);
	rectangle(dst, p0, p1, Scalar(0, 255, 0), 5, 8);
	Point p2(src.cols * 2 / 8, src.rows * 2 / 8);
	Point p3(src.cols * 6 / 8, src.rows * 6 / 8);
	rectangle(dst, p2, p3, Scalar(0, 255, 255), 2, 4);
	return dst;
}
UMat OCV::drawText()
{
	// Render "Hello OpenCV" slightly above the vertical center.
	// BUG FIX: clone so the text is not also written into the stored source.
	dst = src.clone();
	Point p(50, src.rows / 2 - 50);
	// image, text, origin, font, scale, color, thickness, line type
	// LINE_AA replaces the legacy C-API constant CV_AA.
	putText(dst, "Hello OpenCV", p, FONT_HERSHEY_TRIPLEX, 0.8, Scalar(250, 200, 200), 2, LINE_AA);
	return dst;
}
#pragma once
#include "pch.h"
using cv::UMat;
// Interface: affine and perspective transformations (implemented by OCV).
class IAffine
{
public:
// Virtual destructor so deleting through an interface pointer is safe.
virtual ~IAffine();
public:
virtual UMat Frip() = 0; // mirror the image
virtual UMat Scaling() = 0; // resize by fixed scale factors
virtual UMat Rotate() = 0; // rotate once about the center
virtual UMat RotateCont() = 0; // animated 90-degree rotation steps
virtual UMat Perspective() = 0; // random perspective warp
};
#pragma once
#include "pch.h"
using cv::UMat;
// Interface: color-space and intensity operations (implemented by OCV).
class IColor
{
public:
// Virtual destructor so deleting through an interface pointer is safe.
virtual ~IColor();
public:
virtual UMat toGrayscale() = 0; // color -> single-channel gray
virtual UMat toBrightnessSmoothing() = 0; // histogram equalization
virtual UMat toThresholding() = 0; // equalize + random threshold type
};
#pragma once
#include "pch.h"
using cv::UMat;
// Interface: pixel filters and edge detectors (implemented by OCV).
class IFilter
{
public:
// Virtual destructor so deleting through an interface pointer is safe.
virtual ~IFilter();
public:
virtual UMat FripBit() = 0; // bitwise NOT (negative image)
virtual UMat toBlur() = 0; // box blur
virtual UMat toGaussian() = 0; // Gaussian blur
virtual UMat toLaplacian() = 0; // Laplacian edges
virtual UMat toSobel() = 0; // Sobel y-derivative
virtual UMat toCanny() = 0; // Canny edges
virtual UMat ErosionOrDilation() = 0; // random morphological op
};
#pragma once
#include "pch.h"
using cv::UMat;
// Interface: primitive drawing operations (implemented by OCV).
class IGraphics
{
public:
// Virtual destructor so deleting through an interface pointer is safe.
virtual ~IGraphics();
public:
virtual UMat drawCircle() = 0;
virtual UMat drawLine() = 0;
virtual UMat drawEcllipse() = 0; // (sic) draws ellipses; name kept for ABI/caller compatibility
virtual UMat drawRect() = 0;
virtual UMat drawText() = 0;
};
#pragma once
#include "pch.h"
using cv::UMat;
// Interface: higher-level object operations — feature detection, inpainting,
// stitching (implemented by OCV).
class IObjOperation
{
public:
// Virtual destructor so deleting through an interface pointer is safe.
virtual ~IObjOperation();
public:
virtual UMat DetectConers() = 0; // corner/feature detection
virtual UMat EliminateObj() = 0; // inpaint using an edge mask
virtual UMat RepareDmg() = 0; // inpaint near-white damage
//virtual UMat DetectObj() = 0;
//virtual UMat DetectObjVideo() = 0; // omitted
virtual UMat DetectFeatureVal() = 0; // AKAZE keypoint matching
virtual UMat Stitching() = 0; // panorama stitching
};
#pragma once
#include "OCV.h"
using namespace::std;
using namespace::cv;
void main(void)
{
char szDirectoryName[MAX_PATH];
GetCurrentDirectory(sizeof(szDirectoryName), szDirectoryName);
//static_cast<string>(szDirectoryName)でもキャストできる
//たぶん暗黙的か明示的かの違い
//下記のサイトの下の方に解説が載っている
//https://social.msdn.microsoft.com/Forums/ja-JP/5ee435a0-970a-40ef-85ad-1895e938c27d/stdstring?forum=vcgeneralja
const string path = (string)szDirectoryName + "\\D69N新諸元リア販社マーカー.bmp";
//const string path = (string)szDirectoryName + "\\Lenna.jpg";
cout << "Current Directory : " << szDirectoryName << endl;
try
{
UMat src, dst;
imread(path).copyTo(src);
if (src.empty())
throw ("開けなかったよ。");
imshow("Befor", src);
IObjOperation *img = new OCV(src);
dst = img->Stitching();
if (dst.empty() == false)
imshow("After", dst);
delete img;
img = nullptr;
//imwrite(path, aImg);
waitKey(0);
destroyAllWindows();
}
catch (const char* str)
{
cerr << str << endl;
getchar();
}
}
#pragma once
#include "OCV.h"
#include <random>
using namespace::std;
using namespace::cv;
using cv::UMat;
IObjOperation::~IObjOperation()
{
	// Trace message so interface destruction is visible on the console.
	std::cout << "IObjOperation破棄されたよ" << std::endl;
}
UMat OCV::DetectConers()
{
	// Detect up to 50 strong corners (Shi-Tomasi, since useHarrisDetector is
	// false) on a grayscale copy, then mark each one with a circle on a deep
	// copy of the source.
	const int maxCorners = 50, blockSize = 3;
	const double qualityLevel = 0.01, minDistance = 20.0, k = 0.04;
	const bool useHarrisDetector = false;
	UMat gray;
	vector<Point2f> corners;
	dst = src.clone();
	cvtColor(src, gray, COLOR_RGB2GRAY);
	goodFeaturesToTrack(gray, corners, maxCorners, qualityLevel,
		minDistance, UMat(), blockSize, useHarrisDetector, k);
	for (const auto& corner : corners)
		circle(dst, corner, 8, Scalar(255, 255, 0), 2);
	return dst;
}
UMat OCV::EliminateObj()
{
	// Build an edge mask with a Laplacian, show it, then inpaint the masked
	// (edge) pixels away with the TELEA algorithm (radius 1).
	UMat edgeMask;
	Laplacian(src, edgeMask, 0);
	cvtColor(edgeMask, edgeMask, COLOR_RGB2GRAY); // inpaint needs a 1-channel mask
	imshow("mask", edgeMask);
	inpaint(src, edgeMask, dst, 1, INPAINT_TELEA);
	return dst;
}
UMat OCV::RepareDmg()
{
	// Repair bright blemishes: equalize a grayscale copy, mask near-white
	// pixels (> 253 after equalization), and inpaint them (radius 3, TELEA).
	Mat gray, mask;
	cvtColor(src, gray, COLOR_BGR2GRAY); // imread loads BGR, not RGB
	equalizeHist(gray, mask);
	imshow("mask", mask);
	// THRESH_BINARY replaces the legacy C-API macro CV_THRESH_BINARY.
	// Pixels above 253 become 1; any non-zero mask value means "repair here".
	threshold(mask, mask, 253, 1, THRESH_BINARY);
	inpaint(src, mask, dst, 3, INPAINT_TELEA);
	return dst;
}
//UMat OCV::DetectObj()
//{
// UMat gray, equalize;
//
// cvtColor(src, gray, COLOR_RGB2GRAY);
// equalizeHist(gray, equalize);
//
// CascadeClassifier objDetector(argv[2]); // create detector
//
//
// vector<Rect> objs; // search objects
// objDetector.detectMultiScale(equalize, objs,
// 1.2, 2, CV_HAAR_SCALE_IMAGE, Size(30, 30));
//
// src.copyTo(dst); // draw results
// vector<Rect>::const_iterator it = objs.begin();
// for (; it != objs.end(); ++it)
// {
// rectangle(dst, Point(it->x, it->y),
// Point(it->x + it->width, it->y + it->height),
// Scalar(0, 0, 200), 2, CV_AA);
// }
//
// return dst;
//}
//UMat OCV::DetectObjVideo()
//{
// double threshold1 = 40.0, threshold2 = 200.0;
//
// cvtColor(src, dst, COLOR_RGB2GRAY);
// Canny(src, dst, threshold1, threshold2);
// return dst;
//}
UMat OCV::DetectFeatureVal()
{
	// Demo of AKAZE feature matching: detect keypoints on the image and a
	// copy of itself, compute descriptors, brute-force match them, and
	// render the matches side by side into dst.
	UMat mirror = src.clone();
	vector<KeyPoint> kps1, kps2;
	Ptr<FeatureDetector> detector = AKAZE::create();
	detector->detect(src, kps1);
	detector->detect(mirror, kps2);
	Mat desc1, desc2;
	Ptr<DescriptorExtractor> extractor = AKAZE::create();
	extractor->compute(src, kps1, desc1);
	extractor->compute(mirror, kps2, desc2);
	vector<DMatch> matches;
	Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce");
	matcher->match(desc1, desc2, matches);
	drawMatches(src, kps1, mirror, kps2, matches, dst);
	return dst;
}
UMat OCV::Stitching()
{
	// Stitch two (identical) images into a panorama as a Stitcher demo.
	// BUG FIX: the original loop ran only once (condition "i < 1") and used
	// assignment in the condition ("if (i = 1)"), so the stitcher received a
	// single image (and the member `src` was silently overwritten). Push
	// both images without mutating `src`.
	vector<UMat> srcs;
	UMat src2 = src.clone();
	for (int i = 0; i < 2; i++)
	{
		const UMat& img = (i == 1) ? src2 : src;
		srcs.push_back(img);
		imshow("src" + to_string(i), img);
	}
	Stitcher stt = Stitcher::createDefault();
	Stitcher::Status status = stt.stitch(srcs, dst);
	if (status != Stitcher::OK)
		throw "failed at stitch."; // fixed message typo "faild"
	return dst;
}
#pragma once
#include "pch.h"
#include "IAffine.h"
#include "IGraphics.h"
#include "IColor.h"
#include "IFilter.h"
#include "IObjOperation.h"
using namespace::std;
using cv::UMat;
// Concrete implementation of every interface; holds the source image (src),
// the last result (dst), and a reusable rotation matrix (affintrance).
class OCV : public IAffine, public IGraphics, public IColor,
	public IFilter, public IObjOperation
{
public:
	// Stores a (shallow UMat) handle to the image all operations read from.
	// explicit prevents accidental implicit UMat -> OCV conversions; the
	// member-init list replaces assignment in the body.
	explicit OCV(UMat base) : src(base)
	{
	}
	~OCV()
	{
		cout << "OCV破棄されたよ" << endl;
	}
private:
	UMat src, dst, affintrance;
	// Affine transformations
	UMat Frip() override;
	UMat Scaling() override;
	UMat Rotate() override;
	UMat RotateCont() override;
	UMat Perspective() override;
	// Drawing
	UMat drawCircle() override;
	UMat drawLine() override;
	UMat drawEcllipse() override;
	UMat drawRect() override;
	UMat drawText() override;
	// Color processing
	UMat toGrayscale() override;
	UMat toBrightnessSmoothing() override;
	UMat toThresholding() override;
	// Filters
	UMat FripBit() override;
	UMat toBlur() override;
	UMat toGaussian() override;
	UMat toLaplacian() override;
	UMat toSobel() override;
	UMat toCanny() override;
	UMat ErosionOrDilation() override;
	// Image composition: omitted
	// Video processing: omitted
	// Object operations
	UMat DetectConers() override;
	UMat EliminateObj() override;
	UMat RepareDmg() override;
	//UMat DetectObj() override; // omitted
	//UMat DetectObjVideo() override; // omitted
	UMat DetectFeatureVal() override;
	UMat Stitching() override;
};
#pragma once
#include "pch.h"
// Reference: http://bituse.info/c/33
#pragma once
// Headers that every translation unit should include go below.
#include "common.h"
#include <string>
#include <iostream>
#include <Windows.h>
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment