Created
April 21, 2016 14:50
-
-
Save kalineh/79ec2476e90787e2330cab67e273f36c to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
using System.Collections;
using UnityEngine;
using Valve.Interop;
using Valve.VR;
// Reference: native IVRTrackedCamera entry points exposed by Valve.Interop.VRNativeEntrypoints.
// bool VR_IVRTrackedCamera_HasCamera(IntPtr instancePtr, uint nDeviceIndex);
// bool VR_IVRTrackedCamera_GetCameraFirmwareDescription(IntPtr instancePtr, uint nDeviceIndex, string pBuffer, uint nBufferLen);
// bool VR_IVRTrackedCamera_GetCameraFrameDimensions(IntPtr instancePtr, uint nDeviceIndex, ECameraVideoStreamFormat nVideoStreamFormat, ref uint pWidth, ref uint pHeight);
// bool VR_IVRTrackedCamera_SetCameraVideoStreamFormat(IntPtr instancePtr, uint nDeviceIndex, ECameraVideoStreamFormat nVideoStreamFormat);
// ECameraVideoStreamFormat VR_IVRTrackedCamera_GetCameraVideoStreamFormat(IntPtr instancePtr, uint nDeviceIndex);
// bool VR_IVRTrackedCamera_EnableCameraForStreaming(IntPtr instancePtr, uint nDeviceIndex, bool bEnable);
// bool VR_IVRTrackedCamera_StartVideoStream(IntPtr instancePtr, uint nDeviceIndex);
// bool VR_IVRTrackedCamera_StopVideoStream(IntPtr instancePtr, uint nDeviceIndex);
// bool VR_IVRTrackedCamera_IsVideoStreamActive(IntPtr instancePtr, uint nDeviceIndex);
// float VR_IVRTrackedCamera_GetVideoStreamElapsedTime(IntPtr instancePtr, uint nDeviceIndex);
// IntPtr VR_IVRTrackedCamera_GetVideoStreamFrame(IntPtr instancePtr, uint nDeviceIndex);
// bool VR_IVRTrackedCamera_ReleaseVideoStreamFrame(IntPtr instancePtr, uint nDeviceIndex, ref CameraVideoStreamFrame_t pFrameImage);
// bool VR_IVRTrackedCamera_SetAutoExposure(IntPtr instancePtr, uint nDeviceIndex, bool bEnable);
// bool VR_IVRTrackedCamera_SupportsPauseResume(IntPtr instancePtr, uint nDeviceIndex);
// bool VR_IVRTrackedCamera_PauseVideoStream(IntPtr instancePtr, uint nDeviceIndex);
// bool VR_IVRTrackedCamera_ResumeVideoStream(IntPtr instancePtr, uint nDeviceIndex);
// bool VR_IVRTrackedCamera_IsVideoStreamPaused(IntPtr instancePtr, uint nDeviceIndex);
// bool VR_IVRTrackedCamera_GetCameraDistortion(IntPtr instancePtr, uint nDeviceIndex, float flInputU, float flInputV, ref float pflOutputU, ref float pflOutputV);
// bool VR_IVRTrackedCamera_GetCameraProjection(IntPtr instancePtr, uint nDeviceIndex, float flWidthPixels, float flHeightPixels, float flZNear, float flZFar, ref HmdMatrix44_t pProjection);
/// <summary>
/// Experimental probe of the OpenVR IVRTrackedCamera interface (HTC Vive front camera).
/// Hold Space while SteamVR is active to lazily acquire the camera interface; the
/// frame-query and texture-upload code below is left commented out as exploration notes.
/// </summary>
public class ViveCameraTest
    : MonoBehaviour
{
    //private Texture2D texture;

    // Managed wrapper over the native IVRTrackedCamera interface.
    // Null until Init() succeeds.
    private CVRTrackedCamera trackedCamera;

    void Start()
    {
        // Intentionally empty: initialization is deferred to Update()
        // so it only happens once SteamVR is active and Space is held.
    }

    /// <summary>
    /// Attempts to acquire the IVRTrackedCamera interface from the OpenVR runtime.
    /// Leaves <c>trackedCamera</c> null if the interface pointer is unavailable
    /// or the runtime reports an init error.
    /// </summary>
    void Init()
    {
        var error = EVRInitError.None;
        var pTrackedCamera = OpenVR.GetGenericInterface(OpenVR.IVRTrackedCamera_Version, ref error);
        if (pTrackedCamera == System.IntPtr.Zero)
            return;
        if (error != EVRInitError.None)
            return;
        trackedCamera = new CVRTrackedCamera(pTrackedCamera);
    }

    void Update()
    {
        // Only run the experiment while Space is held and SteamVR is up.
        if (!Input.GetKey(KeyCode.Space))
            return;
        if (!SteamVR.active)
            return;
        if (trackedCamera == null)
            Init();
        // Init() can fail silently (no camera / runtime error); bail so the
        // experimental code below can never dereference a null wrapper when
        // it is re-enabled.
        if (trackedCamera == null)
            return;
        /*
        //var hasCamera = VRNativeEntrypoints.VR_IVRTrackedCamera_HasCamera(pTrackedCamera, 0);
        //var isActive = VRNativeEntrypoints.VR_IVRTrackedCamera_IsVideoStreamActive(pTrackedCamera, 0);
        //var format = VRNativeEntrypoints.VR_IVRTrackedCamera_GetCameraVideoStreamFormat(pTrackedCamera, 0);
        //var frame = VRNativeEntrypoints.VR_IVRTrackedCamera_GetVideoStreamFrame(pTrackedCamera, 0);
        var firmwareDesc = new string(' ', 32);
        var dimFormat = ECameraVideoStreamFormat.CVS_FORMAT_NV12;
        var dimWidth = (uint)0;
        var dimHeight = (uint)0;
        var index = (uint)0;
        var hasCamera = trackedCamera.HasCamera(index);
        var validFirmwareDesc = trackedCamera.GetCameraFirmwareDescription(index, firmwareDesc, (uint)firmwareDesc.Length);
        var validFrameDimensions = trackedCamera.GetCameraFrameDimensions(index, dimFormat, ref dimWidth, ref dimHeight);
        //var validSetStreamFormat = trackedCamera.SetCameraVideoStreamFormat(index, dimFormat);
        var streamFormat = trackedCamera.GetCameraVideoStreamFormat(index);
        var isActive = trackedCamera.IsVideoStreamActive(index);
        var elapsed = trackedCamera.GetVideoStreamElapsedTime(index);
        //var frame = trackedCamera.GetVideoStreamFrame(index);
        var supportPause = trackedCamera.SupportsPauseResume(index);
        var isPaused = trackedCamera.IsVideoStreamPaused(index);
        if (!isActive)
            return;
        if (isPaused)
        {
            var frame = trackedCamera.GetVideoStreamFrame(index);
            var freed = trackedCamera.ReleaseVideoStreamFrame(index, ref frame);
            var resume = true;
            if (resume)
            {
                var changed = trackedCamera.SetCameraVideoStreamFormat(index, ECameraVideoStreamFormat.CVS_FORMAT_RAW10);
                var resumed = trackedCamera.ResumeVideoStream(index);
                //var changed2 = trackedCamera.SetCameraVideoStreamFormat(index, ECameraVideoStreamFormat.CVS_FORMAT_RAW10);
                //trackedCamera.SetCameraVideoStreamFormat(index, dimFormat);
                //frame = trackedCamera.GetVideoStreamFrame(index);
            }
            return;
        }
        //var width = (int)frame.m_nWidth;
        //var height = (int)frame.m_nHeight;
        //var active = trackedCamera.IsVideoStreamActive(index);
        //var enabled = trackedCamera.EnableCameraForStreaming(index, true);
        //Debug.LogFormat("width: {0}, height: {1}, active: {2}, enabled: {3}", width, height, active, enabled);
        //Debug.LogFormat("Format: {0}", m_nStreamFormat.ToString()));
        //Debug.LogFormat("{0}", string.Format("{0}", m_nWidth));
        //Debug.LogFormat("{0}", string.Format("{0}", m_nHeight));
        //Debug.LogFormat("{0}", string.Format("{0}", m_nFrameSequence));
        //Debug.LogFormat("{0}", string.Format("{0}", m_nTimeStamp));
        //Debug.LogFormat("{0}", string.Format("{0}", m_nBufferIndex));
        //Debug.LogFormat("{0}", string.Format("{0}", m_nBufferCount));
        //Debug.LogFormat("{0}", string.Format("{0}", m_nImageDataSize));
        //Debug.LogFormat("{0}", string.Format("{0}", m_flFrameElapsedTime));
        //Debug.LogFormat("{0}", string.Format("{0}", m_flFrameCaptureTime));
        //Debug.LogFormat("{0}", string.Format("{0}", m_bPoseIsValid));
        //Debug.LogFormat("{0}", string.Format("{0}", m_matDeviceToAbsoluteTracking));
        return;
        */
        /*
        if (!texture)
            texture = new Texture2D(width, height);
        var size = frame.m_nImageDataSize;
        var data = frame.m_pImageData;
        var copied = new byte[size];
        var colors32 = new Color32[size];
        Debug.LogFormat("frame: {0}", size);
        if (size < 8)
            return;
        //System.Runtime.InteropServices.Marshal.Copy(data, copied, 0, (int)size);
        System.Runtime.InteropServices.Marshal.Copy(data, copied, 0, 0);
        //for (int i = 0; i < 128; i += 4)
        //{
        //colors32[i].r = copied[i + 0];
        //colors32[i].g = copied[i + 1];
        //colors32[i].b = copied[i + 2];
        //colors32[i].a = copied[i + 3];
        //}
        //texture.SetPixels32(colors32);
        */
    }
}
Status API Training Shop Blog About | |
© 2016 GitHub, Inc. Terms Privacy Security Contact Help |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment