Skip to content

Instantly share code, notes, and snippets.

@jmorrill
Last active January 30, 2018 08:37
Show Gist options
  • Save jmorrill/3b0d3483135ca1ba62ef2d2d60ba1eb2 to your computer and use it in GitHub Desktop.
using SharpDX.Direct3D9;
using SharpDX.Multimedia;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Interop;
using Windows.Devices.Enumeration;
using Windows.Graphics.Imaging;
using Windows.Media.Capture;
using Windows.Media.Capture.Frames;
using Windows.Media.MediaProperties;
using Windows.Storage.Streams;
using Windows.UI.Xaml.Controls;
using SharpDX.Mathematics.Interop;
using IntPtr = System.IntPtr;
namespace CaptureIRFrames
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
public partial class MainWindow : Window
{
// True once OnActivated has set up the capture pipeline; guards against re-entry.
bool m_initialized;
// Width of the incoming IR frame in pixels (set per frame in DoFrameMagic).
int ImageWidthPixels;
// Height of the incoming IR frame in pixels (set per frame in DoFrameMagic).
int ImageHeightPixels;
// Debug gate: FrameReader_FrameArrived returns early once this is non-zero
// (the increment is currently commented out, so all frames are processed).
int once = 0;
/// <summary>
/// Builds the window; camera/D3D setup is deferred to <c>OnActivated</c>.
/// </summary>
public MainWindow()
{
InitializeComponent();
}
/// <summary>
/// One-time camera setup, run when the window is first activated: finds a
/// source group with an infrared stream, initializes MediaCapture for
/// CPU-accessible video frames, and starts a frame reader per IR source.
/// </summary>
/// <param name="e">Standard activation event args (forwarded to base).</param>
protected override async void OnActivated(EventArgs e)
{
base.OnActivated(e);
if (m_initialized)
{
return; // Already initialized
}
m_initialized = true;
try
{
var allGroups = await MediaFrameSourceGroup.FindAllAsync();
// Pick the first group that actually exposes an infrared stream instead of
// hard-coding an index (the original used allGroups[1], which only happened
// to be correct on the author's Surface Book). Fall back to the original
// index if detection finds nothing.
var sourceGroup = allGroups.FirstOrDefault(g =>
g.SourceInfos.Any(i => i.SourceKind == MediaFrameSourceKind.Infrared))
?? allGroups[1];
var capture = new MediaCapture();
await capture.InitializeAsync(new MediaCaptureInitializationSettings
{
SourceGroup = sourceGroup,
// This media capture can share streaming with other apps.
SharingMode = MediaCaptureSharingMode.SharedReadOnly,
// Only stream video and don't initialize audio capture devices.
StreamingCaptureMode = StreamingCaptureMode.Video,
// Set to CPU to ensure frames always contain CPU SoftwareBitmap images
// instead of preferring GPU D3DSurface images.
MemoryPreference = MediaCaptureMemoryPreference.Cpu
});
foreach (MediaFrameSource source in capture.FrameSources.Values)
{
MediaFrameSourceKind kind = source.Info.SourceKind;
if (kind != MediaFrameSourceKind.Infrared)
{
continue;
}
// Look for a format we can render; NV12 is preferred because DoFrameMagic
// uploads it directly to an NV12 D3D9 surface.
string requestedSubtype = null;
foreach (MediaFrameFormat format in source.SupportedFormats)
{
Debug.WriteLine(format.Subtype);
requestedSubtype = GetSubtypeForFrameReader(kind, format);
if (format.Subtype == "NV12")
{
requestedSubtype = "NV12";
Debug.WriteLine(format.VideoFormat.Width);
break;
}
}
if (requestedSubtype == null)
{
// No acceptable format was found. Ignore this source.
continue;
}
MediaFrameReader frameReader = await capture.CreateFrameReaderAsync(source, requestedSubtype);
frameReader.FrameArrived += FrameReader_FrameArrived;
MediaFrameReaderStartStatus status = await frameReader.StartAsync();
if (status != MediaFrameReaderStartStatus.Success)
{
Debug.WriteLine($"Frame reader failed to start: {status}");
}
}
}
catch (Exception ex)
{
// async void: an unhandled exception here would crash the process,
// so log instead of letting it escape.
Debug.WriteLine($"Camera initialization failed: {ex}");
}
}
/// <summary>
/// Handler for each arriving media frame; acquires the latest frame and
/// forwards it to <see cref="DoFrameMagic"/>.
/// </summary>
/// <param name="sender">The frame reader that produced the frame.</param>
/// <param name="args">Frame-arrived event args (unused).</param>
private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
// Debug gate: processing stops once 'once' becomes non-zero
// (the increment is commented out, so this never trips by default).
if (once != 0)
{
return;
}
//once++;
// TryAcquireLatestFrame returns the newest frame not yet acquired. It can be
// null when no new frame exists, or when the reader is no longer "Started" —
// a FrameArrived event may still be in flight after the reader is stopped.
using (var latestFrame = sender.TryAcquireLatestFrame())
{
if (latestFrame == null)
{
return;
}
DoFrameMagic(latestFrame);
}
}
/// <summary>
/// Maps a source kind and format to the subtype string to request from the
/// frame reader, or null when the format cannot be rendered by this sample.
/// </summary>
/// <param name="kind">The kind of media frame source.</param>
/// <param name="format">The format advertised by the source.</param>
/// <returns>The subtype to request, or null if unsupported.</returns>
public static string GetSubtypeForFrameReader(MediaFrameSourceKind kind, MediaFrameFormat format)
{
// Media encoding subtypes may differ in case, hence the ordinal-ignore-case helper.
// https://docs.microsoft.com/en-us/uwp/api/Windows.Media.MediaProperties.MediaEncodingSubtypes
string subtype = format.Subtype;
if (kind == MediaFrameSourceKind.Color)
{
// Color sources: accept anything and request conversion to Bgra8.
return MediaEncodingSubtypes.Bgra8;
}
if (kind == MediaFrameSourceKind.Depth)
{
// D16 is the only depth format this sample renders.
return IsSubtype(subtype, "D16") ? subtype : null;
}
if (kind == MediaFrameSourceKind.Infrared)
{
// Renderable infrared formats: L8 and L16, plus NV12 (added for this sample).
bool supported = IsSubtype(subtype, "L8") ||
IsSubtype(subtype, "NV12") ||
IsSubtype(subtype, "L16");
return supported ? subtype : null;
}
// No other source kinds are supported by this class.
return null;
}
/// <summary>Ordinal case-insensitive subtype comparison.</summary>
private static bool IsSubtype(string subtype, string candidate)
{
return String.Equals(subtype, candidate, StringComparison.OrdinalIgnoreCase);
}
// Desktop hwnd used as the (required but otherwise unused) focus window for D3D9 device creation.
[DllImport("user32.dll", SetLastError = false)]
private static extern IntPtr GetDesktopWindow();
// Raw pointer-to-pointer memory copy; no Marshal overload provides this.
[DllImport("kernel32.dll", EntryPoint = "CopyMemory", SetLastError = false)]
private static extern void CopyMemory(IntPtr dest, IntPtr src, uint count);
[ComImport]
[Guid("905a0fef-bc53-11df-8c49-001e4fc686da")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
/* This interface gives pointer access to the IBuffer. Seems to
only be exposed to native languages */
interface IBufferByteAccess
{
// Returns the raw byte pointer backing the IBuffer.
void Buffer(out IntPtr buffer);
}
// D3D9Ex context, created lazily in EnsureDirect3D.
private Direct3DEx _d3dEx;
// Shared D3D9Ex device, created lazily in EnsureDirect3D.
private DeviceEx _d3dDevice;
// Offscreen GPU surface in NV12 layout; frame bytes are copied here (EnsureD3DSurfaces).
private Surface _nv12Surface;
// X8R8G8B8 render target that receives the color-converted image (EnsureD3DSurfaces).
private Surface _renderTarget;
/// <summary>
/// Lazily creates the D3D9Ex context and hardware device on first use;
/// subsequent calls are no-ops.
/// </summary>
private void EnsureDirect3D()
{
_d3dEx = _d3dEx ?? new Direct3DEx();
if (_d3dDevice != null)
{
return;
}
/* The desktop hwnd is used because it always exists, whereas the WPF hwnd
interop helper may not have a window handle yet at this point.
Requiring an hwnd for a d3d device is a dumb dx9 thing. */
IntPtr desktopHwnd = GetDesktopWindow();
// Standard creation params; the 1x1 backbuffer is never presented.
var presentParams = new PresentParameters(1, 1);
_d3dDevice = new DeviceEx(
_d3dEx,
0, // default adapter
DeviceType.Hardware,
desktopHwnd,
CreateFlags.FpuPreserve | CreateFlags.HardwareVertexProcessing | CreateFlags.Multithreaded,
presentParams);
}
/// <summary>
/// Makes sure the NV12 staging surface and X8R8G8B8 render target exist at
/// the requested size, disposing and recreating either when the size changes.
/// </summary>
/// <param name="width">Required surface width in pixels.</param>
/// <param name="height">Required surface height in pixels.</param>
private void EnsureD3DSurfaces(int width, int height)
{
if (NeedsRecreate(_nv12Surface, width, height))
{
_nv12Surface?.Dispose();
// Null the field before recreating so a failed create doesn't leave a
// disposed surface behind to be used on the next frame.
_nv12Surface = null;
/* Format here is non-obvious, but this is similar to how it is in the
native DX SDK: the NV12 FourCC reinterpreted as a Format value. */
var nv12Format = (Format)(int)new FourCC('N', 'V', '1', '2');
/* GPU offscreen surface using the NV12 color space */
_nv12Surface = Surface.CreateOffscreenPlain(_d3dDevice, width, height, nv12Format, Pool.Default);
}
if (NeedsRecreate(_renderTarget, width, height))
{
_renderTarget?.Dispose();
_renderTarget = null; // same rationale as above
/* Pretty standard X8R8G8B8 surface render target */
_renderTarget = Surface.CreateRenderTarget(_d3dDevice, width, height, Format.X8R8G8B8, MultisampleType.None, 0, true);
}
}
/// <summary>
/// True when the surface is missing or its dimensions differ from the
/// requested size.
/// </summary>
private static bool NeedsRecreate(Surface surface, int width, int height)
{
return surface == null ||
surface.Description.Width != width ||
surface.Description.Height != height;
}
/// <summary>
/// Copies the frame's NV12 bytes into a GPU surface, color-converts it to
/// RGB via StretchRectangle, and pushes the result into the WPF D3DImage.
/// Runs on the frame reader's callback thread; only the final back-buffer
/// update is marshaled to the UI thread.
/// </summary>
/// <param name="frame">The acquired media frame (disposed by the caller).</param>
public void DoFrameMagic(MediaFrameReference frame)
{
/* Skip non-illuminated frames; IR cameras that alternate the emitter
on/off produce a flickering image if both phases are rendered. */
if (!frame.VideoMediaFrame.InfraredMediaFrame.IsIlluminated)
{
return;
}
EnsureDirect3D();
ImageWidthPixels = (int)frame.Format.VideoFormat.Width;
ImageHeightPixels = (int)frame.Format.VideoFormat.Height;
EnsureD3DSurfaces(ImageWidthPixels, ImageHeightPixels);
var buffer = frame.BufferMediaFrame;
var sourceBuffer = buffer.Buffer;
/* This needs to happen somewhere or we stop getting new frames */
frame.VideoMediaFrame.SoftwareBitmap.Dispose();
/* Use COM interop to get the real pointer behind the IBuffer
(IBufferByteAccess is only exposed to native code). */
var ba = sourceBuffer as IBufferByteAccess;
ba.Buffer(out var pSource);
/* Make sure to release the COM ref now, or it survives until the GC
finalizes it, which can be a long time / never. */
Marshal.ReleaseComObject(ba);
/* Map the GPU surface, i.e. get a CPU pointer to its bytes. */
var data = _nv12Surface.LockRectangle(LockFlags.None);
try
{
/* NV12 is a full-height luma plane followed by a half-height packed-UV
plane, so 1.5x the height in total rows. NV12 dimensions are even,
making this integer form equivalent to the 'height * 1.5' bound. */
int totalLines = ImageHeightPixels + ImageHeightPixels / 2;
for (int line = 0; line < totalLines; line++)
{
/* P/Invoke CopyMemory because no Marshal overload does a
pointer-to-pointer copy. Copy only 'width' bytes per row: the
source rows are tightly packed at the pixel width while the
destination advances by the surface's own pitch. */
CopyMemory(data.DataPointer + (data.Pitch * line),
pSource + (line * ImageWidthPixels),
(uint)ImageWidthPixels);
}
}
finally
{
/* Always unmap, even if the copy throws — otherwise the surface
stays locked and every subsequent frame fails. */
_nv12Surface.UnlockRectangle();
}
/* Magic happens here: StretchRectangle performs the NV12 -> RGB color
conversion on the GPU. */
_d3dDevice.StretchRectangle(_nv12Surface, _renderTarget, TextureFilter.None);
Dispatcher.Invoke(new Action(() =>
{
try
{
this.wpfImageSource.Lock();
this.wpfImageSource.SetBackBuffer(D3DResourceType.IDirect3DSurface9, _renderTarget.NativePointer);
this.wpfImageSource.AddDirtyRect(new Int32Rect(0, 0, wpfImageSource.PixelWidth, wpfImageSource.PixelHeight));
}
finally
{
this.wpfImageSource.Unlock();
}
}));
}
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment