@recuraki
Created December 16, 2018 12:29
Display the webcam feed on screen with a separate delay applied to each of the R, G, and B channels
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using OpenCvSharp;
using OpenCvSharp.Extensions;
using System.Threading;
// https://minus9d.hatenablog.com/entry/20130204/1359989829
// http://schima.hatenablog.com/entry/2014/01/30/105406
namespace WpfApp1
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : System.Windows.Window
    {
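        // Per-channel FIFO buffers of past frame planes; "zeros" is a blank plane
        // used to pre-fill a buffer whenever a slider sets a new delay.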
        List<Mat> bufferLayerB = new List<Mat>();
        List<Mat> bufferLayerG = new List<Mat>();
        List<Mat> bufferLayerR = new List<Mat>();
        Mat zeros;
        public bool IsExitCapture { get; set; }
        public MainWindow()
        {
            this.InitializeComponent();
        }
        /// <summary>
        /// Grab frames from the camera and keep updating the displayed image
        /// </summary>
        public virtual void Capture(object state)
        {
            var camera = new VideoCapture(0 /* use device index 0 */)
            {
                // Size (and optionally frame rate) of the captured frames
                FrameWidth = 1920,
                FrameHeight = 1000,
                // Fps = 60
            };
            using (var img = new Mat()) // Mat that receives each captured frame
            using (camera)
            {
                var blackimg = new Mat(camera.FrameHeight, camera.FrameWidth, MatType.CV_8UC1, new Scalar(0, 0, 0));
                var splitBlackImg = Cv2.Split(blackimg);
                zeros = splitBlackImg[0];
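                // "zeros" is now a black plane the same size as the camera frames;
                // the slider handlers reuse it to pad the delay buffers.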
                var video = new VideoWriter(@"d:\a.avi", 0, camera.Fps, new OpenCvSharp.Size(camera.FrameWidth, camera.FrameHeight));
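                // Note: the writer is opened here, but the video.Write(dst) call inside the
                // loop is commented out, so no file is produced unless it is re-enabled.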
                while (true)
                {
                    if (this.IsExitCapture)
                    {
                        this.Dispatcher.Invoke(() => this._Image.Source = null);
                        break;
                    }
                    camera.Read(img); // Read a frame from the webcam (blocks until a frame is in the buffer)
                    if (img.Empty())
                    {
                        break;
                    }
                    var splitImg = Cv2.Split(img); // planes come back in B, G, R order
                    var layerB = splitImg[0];
                    var layerG = splitImg[1];
                    var layerR = splitImg[2];
                    bufferLayerB.Add(layerB);
                    bufferLayerG.Add(layerG);
                    bufferLayerR.Add(layerR);
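                    // Each buffer is a FIFO: the newest plane was appended above and the
                    // oldest is taken from the front below, so a buffer pre-filled with N
                    // black planes shows that channel N frames behind the live image.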
                    Mat[] dstLayers = new Mat[3];
                    Mat dst = new Mat();
                    // Keep the same B, G, R plane order that Cv2.Split produced so Merge rebuilds a BGR image
                    dstLayers[0] = bufferLayerB[0];
                    dstLayers[1] = bufferLayerG[0];
                    dstLayers[2] = bufferLayerR[0];
                    bufferLayerB.RemoveAt(0);
                    bufferLayerG.RemoveAt(0);
                    bufferLayerR.RemoveAt(0);
                    Cv2.Merge(dstLayers, dst);
                    this.Dispatcher.Invoke(() =>
                    {
                        //video.Write(dst);
                        this._Image.Source = dst.ToWriteableBitmap();
                        // this._Image.Source = img.ToWriteableBitmap();
                    });
                }
                video.Dispose();
            }
        }
        // ---- EventHandlers ----
        /// <summary>
        /// Called when the Window has loaded
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            ThreadPool.QueueUserWorkItem(this.Capture);
        }
        /// <summary>
        /// Called when the Exit Capture button is clicked
        /// </summary>
        protected virtual void Button_Click(object sender, RoutedEventArgs e)
        {
            this.IsExitCapture = true;
        }
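        // Each slider handler below replaces its channel's buffer with N black planes,
        // so that channel is shown N frames behind the live image.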
        private void SliderR_ValueChanged(object sender, RoutedPropertyChangedEventArgs<double> e)
        {
            int delayR = (int)sliderR.Value;
            bufferLayerR = new List<Mat>();
            for (int i = 0; i < delayR; i++)
            {
                bufferLayerR.Add(zeros);
            }
        }
        private void SliderG_ValueChanged(object sender, RoutedPropertyChangedEventArgs<double> e)
        {
            int delayG = (int)sliderG.Value;
            bufferLayerG = new List<Mat>();
            for (int i = 0; i < delayG; i++)
            {
                bufferLayerG.Add(zeros);
            }
        }
        private void SliderB_ValueChanged(object sender, RoutedPropertyChangedEventArgs<double> e)
        {
            int delayB = (int)sliderB.Value;
            bufferLayerB = new List<Mat>();
            for (int i = 0; i < delayB; i++)
            {
                bufferLayerB.Add(zeros);
            }
        }
    }
}
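
The code-behind above assumes a MainWindow.xaml that defines an Image element named _Image, three sliders named sliderR, sliderG, and sliderB, and an Exit Capture button, all wired to the handlers shown. The gist does not include that markup; the following is only a minimal hypothetical sketch of what it could look like (the element and handler names come from the code-behind, while the layout and slider ranges are assumptions).

<Window x:Class="WpfApp1.MainWindow"
        xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
        xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
        Title="RGB channel delay" Width="960" Height="640"
        Loaded="Window_Loaded">
    <DockPanel>
        <StackPanel DockPanel.Dock="Bottom" Orientation="Horizontal">
            <!-- Each slider sets the delay (in frames) for one channel -->
            <Slider x:Name="sliderR" Width="150" Minimum="0" Maximum="60" ValueChanged="SliderR_ValueChanged"/>
            <Slider x:Name="sliderG" Width="150" Minimum="0" Maximum="60" ValueChanged="SliderG_ValueChanged"/>
            <Slider x:Name="sliderB" Width="150" Minimum="0" Maximum="60" ValueChanged="SliderB_ValueChanged"/>
            <Button Content="Exit Capture" Click="Button_Click"/>
        </StackPanel>
        <!-- The merged, per-channel-delayed frames are shown here -->
        <Image x:Name="_Image"/>
    </DockPanel>
</Window>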