Skip to content

Instantly share code, notes, and snippets.

@jherico
Created September 8, 2015 00:52
Show Gist options
  • Save jherico/4b7f1ea79b7c9d87c84f to your computer and use it in GitHub Desktop.
#ifndef OCULUSGLWIDGET_H
#define OCULUSGLWIDGET_H
#include <QOpenGLFunctions>
#include <QGLWidget>
#include <QGLFramebufferObject>
#include <Extras/OVR_Math.h>
#include <OVR_CAPI_GL.h>
// Qt OpenGL widget that renders a field of rotating textured quads to an
// Oculus Rift HMD through the LibOVR 0.6-era swap-texture-set API.
class OculusGLWidget : public QGLWidget {
Q_OBJECT
public:
OculusGLWidget(QWidget* parent = nullptr);
~OculusGLWidget();
protected:
// QGLWidget overrides: one-time GL/LibOVR setup, (unused) resize hook,
// per-frame render, and keyboard handling.
virtual void initializeGL();
virtual void resizeGL(int width, int height);
virtual void paintGL();
virtual void keyPressEvent(QKeyEvent *event);
private:
QImage myImg; // image drawn on every quad (replaced by an async download)
GLuint myTex = 0; // GL texture for myImg; 0 means "needs (re)upload"
static const int IMG_COUNT = 1000; // number of floating quads in the scene
struct ImgInfo {
float yawVelocity; // rotation speed around Y, degrees per time unit
OVR::Vector3f pos; // world-space position of the quad's center
} imgInfos[IMG_COUNT];
void drawScene(const double &dt);
float bodyYaw = 0; // player yaw; never modified in the visible code
OVR::Vector3f bodyPos = OVR::Vector3f(0, 0, 0); // player position; y set to eye height in the ctor
ovrHmd Hmd; // LibOVR device/session handle
ovrResult result; // result of ovr_Create in the constructor
ovrHmdDesc HmdDesc = {}; // HMD capabilities/FOV description
// Make eye render buffers
ovrVector3f ViewOffset[2]; // per-eye IPD offsets, refreshed each frame
ovrPosef EyeRenderPose[2]; // NOTE(review): shadowed by a local of the same name in paintGL — the member is never written; verify intent
QGLFramebufferObject *offFB = NULL; // legacy side-by-side offscreen FBO
ovrGLTexture eyeTextures[2] = {}; // legacy per-eye descriptors; only referenced from commented-out code
ovrEyeRenderDesc EyeRenderDesc[2]; // NOTE(review): shadowed by a local in initializeGL, so this member stays uninitialized yet is read in paintGL — verify
OVR::Matrix4f Projection[2]; // only written by commented-out code
double tStart = 0; // intended frame-timing origin; only used by commented-out code
};
#endif // OCULUSGLWIDGET_H
#include "OculusGLWidget.h"

#include <chrono>
#include <cmath>
#include <iostream>
#include <random>

#include <QKeyEvent>
#include <QLinearGradient>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QPainter>
#include <QTimer>
#include <QUrl>
// Half-extent of the cube in which quads are scattered.
static const GLfloat SPACE_DEPTH = 25; // 25 meters
// Map the SDK sample's assert macro onto Qt's.
#define OVR_ASSERT Q_ASSERT
using namespace OVR;
// Process-wide GL function resolver; created in initializeGL, never freed.
QOpenGLFunctions* GL{ nullptr };
struct DepthBuffer
{
GLuint texId;
DepthBuffer(Sizei size, int sampleCount)
{
OVR_ASSERT(sampleCount <= 1); // The code doesn't currently handle MSAA textures.
glGenTextures(1, &texId);
glBindTexture(GL_TEXTURE_2D, texId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
GLenum internalFormat = GL_DEPTH_COMPONENT24;
GLenum type = GL_UNSIGNED_INT;
glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, size.w, size.h, 0, GL_DEPTH_COMPONENT, type, NULL);
}
};
struct TextureBuffer
{
ovrSwapTextureSet* TextureSet;
GLuint texId;
GLuint fboId;
Sizei texSize;
TextureBuffer(ovrHmd hmd, bool rendertarget, bool displayableOnHmd, OVR::Sizei size, int mipLevels, unsigned char * data, int sampleCount)
{
OVR_ASSERT(sampleCount <= 3); // The code doesn't currently handle MSAA textures.
texSize = size;
if (displayableOnHmd) {
// This texture isn't necessarily going to be a rendertarget, but it usually is.
OVR_ASSERT(hmd); // No HMD? A little odd.
OVR_ASSERT(sampleCount == 1); // ovr_CreateSwapTextureSetD3D11 doesn't support MSAA.
ovr_CreateSwapTextureSetGL(hmd, GL_BGR, size.w, size.h, &TextureSet);
for (int i = 0; i < TextureSet->TextureCount; ++i)
{
ovrGLTexture* tex = (ovrGLTexture*)&TextureSet->Textures[i];
glBindTexture(GL_TEXTURE_2D, tex->OGL.TexId);
if (rendertarget)
{
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
}
else
{
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
}
}
}
else {
glGenTextures(1, &texId);
glBindTexture(GL_TEXTURE_2D, texId);
if (rendertarget)
{
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
}
else
{
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
}
//gluBuild2DMipmaps(GL_TEXTURE_2D, 0, texSize.w, texSize.h, GL_RGB, GL_UNSIGNED_BYTE, data);
glTexImage2D(GL_TEXTURE_2D, 0, 3, texSize.w, texSize.h, 0, GL_BGR, GL_UNSIGNED_BYTE, data);
}
if (mipLevels > 1)
{
GL->glGenerateMipmap(GL_TEXTURE_2D);
}
GL->glGenFramebuffers(1, &fboId);
}
Sizei GetSize(void) const
{
return texSize;
}
void SetAndClearRenderSurface(DepthBuffer * dbuffer)
{
ovrGLTexture* tex = (ovrGLTexture*)&TextureSet->Textures[TextureSet->CurrentIndex];
GL->glBindFramebuffer(GL_FRAMEBUFFER, fboId);
GL->glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex->OGL.TexId, 0);
GL->glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, dbuffer->texId, 0);
glViewport(0, 0, texSize.w, texSize.h);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
void UnsetRenderSurface()
{
GL->glBindFramebuffer(GL_FRAMEBUFFER, fboId);
GL->glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
GL->glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, 0, 0);
}
};
// Per-eye render targets, created in OculusGLWidget::initializeGL.
DepthBuffer * eyeDepthBuffer[2];
TextureBuffer * eyeRenderTexture[2];
// NOTE(review): dead global — inside the member functions below, unqualified
// HmdDesc resolves to the OculusGLWidget member, never to this variable.
ovrHmdDesc HmdDesc;
// Constructor: initializes LibOVR, opens the HMD session, randomly scatters
// the image quads, and starts the asynchronous image download.
OculusGLWidget::OculusGLWidget(QWidget* parent) : QGLWidget(parent) {
    // Fix: QGLWidget::format() returns its QGLFormat BY VALUE, so the original
    // `format().setDepth(false)` mutated a temporary and had no effect.
    // Depth testing happens in the per-eye FBOs, not the window surface.
    QGLFormat fmt = format();
    fmt.setDepth(false);
    setFormat(fmt);
    setAutoBufferSwap(false);       // the Oculus compositor presents frames, not Qt
    setFocusPolicy(Qt::StrongFocus); // so keyPressEvent is delivered
    // Initialise rift
    if (ovr_Initialize(nullptr) != ovrSuccess) { MessageBoxA(NULL, "Unable to initialize libOVR.", "", MB_OK); }
    ovrGraphicsLuid luid;
    result = ovr_Create(&Hmd, &luid);
    // NOTE(review): after this message box the constructor keeps running with
    // an invalid Hmd handle; a hard failure path would be safer — verify.
    if (!OVR_SUCCESS(result)) { MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK); ovr_Shutdown(); }
    HmdDesc = ovr_GetHmdDesc(Hmd);
    if (HmdDesc.ProductName[0] == '\0') MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);
    // Stand the viewer at the user's configured eye height (default 1.70 m).
    bodyPos.y = ovr_GetFloat(Hmd, OVR_KEY_EYE_HEIGHT, 1.70);
    { // randomly distribute images
        std::mt19937 generator(std::chrono::system_clock::now().time_since_epoch().count());
        std::uniform_real_distribution<float> distriPos(-SPACE_DEPTH, SPACE_DEPTH);
        std::uniform_real_distribution<float> distriYawVelocity(-90, 90); // max: 90 degrees/second
        for (int i = 0; i < IMG_COUNT; ++i) {
            imgInfos[i].yawVelocity = distriYawVelocity(generator);
            do {
                imgInfos[i].pos = OVR::Vector3f(distriPos(generator), distriPos(generator), distriPos(generator));
            } while ((imgInfos[i].pos - bodyPos).Length() < 1); // keep quads >= 1 m from the viewer
        }
    }
    { // load image from web
        // Renders a 256x256 placeholder tile with the given caption.
        auto createTextImage = [](const QString &text) {
            static const int w = 256, h = 256;
            QImage img(w, h, QImage::Format_ARGB32);
            QPainter p(&img);
            p.fillRect(0, 0, w, h, QColor(64, 0, 0));
            QLinearGradient lg(0, 0, 0, h);
            lg.setColorAt(0, QColor(64, 64, 64));
            lg.setColorAt(1, QColor(16, 16, 16));
            p.fillRect(QRectF(0, 0, w, h), lg);
            p.setPen(QColor(255, 255, 255));
            p.setFont(QFont("Helvetica", w / 10));
            p.drawText(0, 0, w, h, Qt::AlignHCenter | Qt::AlignVCenter, text);
            return img;
        };
        myImg = createTextImage("Loading...");
        QNetworkAccessManager *nam = new QNetworkAccessManager(this);
        // Fix: the original captured [&]. The reply arrives long after this
        // constructor has returned, so capturing the local createTextImage
        // lambda by reference was a dangling-reference bug. Capture this
        // (valid for the nam child's lifetime) and the lambda by value.
        connect(nam, &QNetworkAccessManager::finished, [this, createTextImage](QNetworkReply *reply) {
            if (!myImg.loadFromData(reply->readAll())) {
                std::cerr << "Could not load image." << std::endl;
                myImg = createTextImage("Error!");
            }
            makeCurrent();
            deleteTexture(myTex); // drop the placeholder texture...
            myTex = 0;            // ...so drawScene re-uploads the new image
            reply->deleteLater();
        });
        nam->get(QNetworkRequest(QUrl("http://dbvc4uanumi2d.cloudfront.net/cdn/3.6.7/wp-content/themes/oculus/img/rift/lowLatencyHeadTracking.jpg")));
    }
}
// Destructor. Teardown order matters: the GL context must be current before
// releasing GL resources, and the HMD must be destroyed before LibOVR shutdown.
OculusGLWidget::~OculusGLWidget() {
makeCurrent();
deleteTexture(myTex);
delete offFB;
if (Hmd) ovr_Destroy(Hmd);
ovr_Shutdown();
}
// One-time GL/LibOVR setup: head tracking, per-eye render targets, the mirror
// texture, render descriptions, and the continuous repaint timer.
void OculusGLWidget::initializeGL() {
    auto context = QOpenGLContext::currentContext();
    GL = new QOpenGLFunctions(context); // process-lifetime global, intentionally never freed
    GL->initializeOpenGLFunctions();
    // Start the sensor
    ovr_ConfigureTracking(Hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
        ovrTrackingCap_Position, 0);
    OVR::Sizei recommenedTexSize[] = {
        ovr_GetFovTextureSize(Hmd, ovrEye_Left, HmdDesc.DefaultEyeFov[0], 1),
        ovr_GetFovTextureSize(Hmd, ovrEye_Right, HmdDesc.DefaultEyeFov[1], 1)
    };
    // Legacy side-by-side FBO; only created and filtered here, rendering now
    // goes through the per-eye TextureBuffers below.
    QGLFramebufferObjectFormat ff;
    ff.setAttachment(QGLFramebufferObject::Depth);
    offFB = new QGLFramebufferObject(recommenedTexSize[0].w + recommenedTexSize[1].w, recommenedTexSize[0].h, ff);
    glBindTexture(GL_TEXTURE_2D, offFB->texture());
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    // Per-eye swap texture sets + depth buffers at the HMD-recommended size.
    for (int i = 0; i < 2; i++)
    {
        ovrSizei idealTextureSize = ovr_GetFovTextureSize(Hmd, (ovrEyeType)i, HmdDesc.DefaultEyeFov[i], 1);
        eyeRenderTexture[i] = new TextureBuffer(Hmd, true, true, idealTextureSize, 1, NULL, 1);
        eyeDepthBuffer[i] = new DepthBuffer(eyeRenderTexture[i]->GetSize(), 0);
    }
    // Create mirror texture and an FBO used to copy mirror texture to back buffer
    ovrGLTexture* mirrorTexture;
    ovr_CreateMirrorTextureGL(Hmd, GL_RGBA, recommenedTexSize[0].w + recommenedTexSize[1].w, recommenedTexSize[0].h, (ovrTexture**)&mirrorTexture);
    // Configure the mirror read buffer
    // NOTE(review): mirrorFBO/mirrorTexture are locals that are never used
    // again — the mirror blit to the window back buffer is not implemented.
    GLuint mirrorFBO = 0;
    GL->glGenFramebuffers(1, &mirrorFBO);
    GL->glBindFramebuffer(GL_READ_FRAMEBUFFER, mirrorFBO);
    GL->glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, mirrorTexture->OGL.TexId, 0);
    GL->glFramebufferRenderbuffer(GL_READ_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, 0);
    GL->glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
    // Fix: the original declared a LOCAL `ovrEyeRenderDesc EyeRenderDesc[2]`
    // here, shadowing the member of the same name. The member stayed
    // uninitialized and paintGL read garbage IPD offsets from it. Assign to
    // the member instead.
    EyeRenderDesc[0] = ovr_GetRenderDesc(Hmd, ovrEye_Left, HmdDesc.DefaultEyeFov[0]);
    EyeRenderDesc[1] = ovr_GetRenderDesc(Hmd, ovrEye_Right, HmdDesc.DefaultEyeFov[1]);
    // Turn off vsync to let the compositor do its magic
    //GL->wglSwapIntervalEXT(0);
    // Repaint continuously: start() keeps the default 0 ms interval, so the
    // timer fires on every event-loop pass.
    QTimer *t = new QTimer(this);
    connect(t, &QTimer::timeout, this, &QGLWidget::updateGL);
    t->start();
}
// Intentionally empty: per-eye target sizes are fixed in initializeGL and the
// compositor handles presentation. The commented-out code below is from the
// pre-0.6 client-distortion API (ovr_ConfigureRendering) and no longer applies.
void OculusGLWidget::resizeGL(int width, int height) {
// ovrGLConfig config = {};
// config.Header.API = ovrRenderAPI_OpenGL;
// config.Header.RTSize = OVR::Sizei(width, height);
//unsigned int distortionCaps = ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp;
//if (!ovr_ConfigureRendering(Hmd, &config, distortionCaps, HmdDesc.DefaultEyeFov, EyeRenderDesc)) {
// std::cerr << "ovr_ConfigureRendering" << std::endl;
// exit(1);
//}
//Projection[0] = ovrMatrix4f_Projection(EyeRenderDesc[0].Fov, .1f, 1000.0f, true);
//Projection[1] = ovrMatrix4f_Projection(EyeRenderDesc[1].Fov, .1f, 1000.0f, true);
}
// Per-frame render: query head pose, render the scene once per eye into that
// eye's swap-texture-set surface, then submit the layer to the compositor.
void OculusGLWidget::paintGL() {
// Keyboard inputs to adjust player orientation
static float Yaw(3.141592f);
//ovrFrameTiming HmdFrameTiming = ovr_BeginFrame(Hmd, 0);
//OVR::SensorState ss = ovr_GetSensorState(Hmd, HmdFrameTiming.ScanoutMidpointSeconds);
//if (tStart == 0) tStart = ss.Predicted.TimeInSeconds;
//const double dt = ss.Predicted.TimeInSeconds - tStart;
// NOTE(review): dt is a frozen constant, so the quads never animate; the
// commented-out code above suggests it was meant to be elapsed seconds — verify.
const double dt = 30;
//offFB->bind();
glEnable(GL_DEPTH_TEST);
#if 1
glClearColor(0.1, 0.0, 0.1, 0.0);
#else // be aware of advanced data like this
const auto &v = ss.Predicted.AngularVelocity;
glClearColor(fabs(v.x)*0.2, fabs(v.y)*0.2, fabs(v.z)*0.2, 0.0);
#endif
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
//using namespace OVR;
// bodyQ is computed but unused below (leftover from the legacy path).
const Quatf bodyQ(Vector3f(0, 1, 0), bodyYaw);
/*
for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {
ovrEyeType eye = HmdDesc.EyeRenderOrder[eyeIndex];
glViewport(eyeTextures[eye].OGL.Header.RenderViewport.Pos.x,
eyeTextures[eye].OGL.Header.RenderViewport.Pos.y,
eyeTextures[eye].OGL.Header.RenderViewport.Size.w,
eyeTextures[eye].OGL.Header.RenderViewport.Size.h);
ovrPosef eyeRenderPose = ovr_BeginEyeRender(Hmd, eye);
*/
// Get eye poses, feeding in correct IPD offset
// NOTE(review): the member EyeRenderDesc is shadowed by a local declaration
// in initializeGL and never assigned, so these offsets read uninitialized
// member data — verify against initializeGL.
ViewOffset[0] = EyeRenderDesc[0].HmdToEyeViewOffset;
ViewOffset[1] = EyeRenderDesc[1].HmdToEyeViewOffset;
// Local array shadows the member of the same name; the member is never used.
ovrPosef EyeRenderPose[2];
ovrFrameTiming ftiming = ovr_GetFrameTiming(Hmd, 0);
ovrTrackingState hmdState = ovr_GetTrackingState(Hmd, ftiming.DisplayMidpointSeconds);
ovr_CalcEyePoses(hmdState.HeadPose.ThePose, ViewOffset, EyeRenderPose);
// Fixed viewer position at 1.6 m; duplicates the role of bodyPos.
static Vector3f Pos2(0.0f, 1.6f, 0.0f);
// Set up positional data.
ovrViewScaleDesc viewScaleDesc;
viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
viewScaleDesc.HmdToEyeViewOffset[0] = ViewOffset[0];
viewScaleDesc.HmdToEyeViewOffset[1] = ViewOffset[1];
// Single EyeFov layer describing both eye textures for ovr_SubmitFrame.
ovrLayerEyeFov ld;
ld.Header.Type = ovrLayerType_EyeFov;
ld.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft; // Because OpenGL.
for (int eye = 0; eye < 2; eye++)
{
ld.ColorTexture[eye] = eyeRenderTexture[eye]->TextureSet;
ld.Viewport[eye] = Recti(eyeRenderTexture[eye]->GetSize());
ld.Fov[eye] = HmdDesc.DefaultEyeFov[eye];
ld.RenderPose[eye] = EyeRenderPose[eye];
}
ovrLayerHeader* layers = &ld.Header;
Matrix4f view;
Matrix4f proj;
for (int eye = 0; eye<2; eye++)
{
// Switch to eye render target
eyeRenderTexture[eye]->SetAndClearRenderSurface(eyeDepthBuffer[eye]);
// Get view and projection matrices
Matrix4f rollPitchYaw = Matrix4f::RotationY(Yaw);
Matrix4f finalRollPitchYaw = rollPitchYaw * Matrix4f(EyeRenderPose[eye].Orientation);
Vector3f finalUp = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
Vector3f finalForward = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
Vector3f shiftedEyePos = Pos2 + rollPitchYaw.Transform(EyeRenderPose[eye].Position);
view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
proj = ovrMatrix4f_Projection(HmdDesc.DefaultEyeFov[eye], 0.2f, 1000.0f, ovrProjection_RightHanded);
// NOTE(review): only proj is loaded here; the fixed-function modelview is
// left at whatever drawScene sets via glLoadIdentity, so `view` does not
// affect the rendered scene — verify intent.
glMatrixMode(GL_PROJECTION);
glLoadMatrixf((GLfloat*)proj.M);
glMatrixMode(GL_MODELVIEW);
// Render world
//roomScene.Render(view, proj);
drawScene(dt);
// Avoids an error when calling SetAndClearRenderSurface during next iteration.
// Without this, during the next while loop iteration SetAndClearRenderSurface
// would bind a framebuffer with an invalid COLOR_ATTACHMENT0 because the texture ID
// associated with COLOR_ATTACHMENT0 had been unlocked by calling wglDXUnlockObjectsNV.
eyeRenderTexture[eye]->UnsetRenderSurface();
}
//const Transformf worldPose(bodyQ*eyeRenderPose.Orientation, bodyPos + bodyQ.Rotate(eyeRenderPose.Position));
//const static Vector3f UpVector(0.0f, 1.0f, 0.0f);
//const static Vector3f ForwardVector(0.0f, 0.0f, -1.0f);
//const Matrix4f view = Matrix4f::LookAtRH(worldPose.Translation, worldPose.Translation + worldPose.Rotation.Rotate(ForwardVector), worldPose.Rotation.Rotate(UpVector));
// Loads the last eye's view-projection for any subsequent window drawing.
const Matrix4f viewProj = proj * view;
GL->glUseProgram(0); // Fixed function pipeline
glMatrixMode(GL_PROJECTION);
glLoadMatrixf(&viewProj.Transposed().M[0][0]);
glMatrixMode(GL_MODELVIEW);
//ovr_SubmitFrame(Hmd);
//ovr_EndEyeRender(Hmd, eye, eyeRenderPose, &eyeTextures[eye].Texture);
// NOTE(review): this local `result` shadows the member and is never checked;
// ovr_SubmitFrame failure (e.g. display lost) goes unnoticed — verify.
ovrResult result = ovr_SubmitFrame(Hmd, 0, &viewScaleDesc, &layers, 1);
for (int eye = 0; eye<2; eye++) {
// Increment to use next texture, just before writing
eyeRenderTexture[eye]->TextureSet->CurrentIndex = (eyeRenderTexture[eye]->TextureSet->CurrentIndex + 1) % eyeRenderTexture[eye]->TextureSet->TextureCount;
}
}
void OculusGLWidget::drawScene(const double &dt) {
if (myImg.isNull()) return;
if (!myTex) myTex = bindTexture(myImg, GL_TEXTURE_2D, GL_RGBA, QGLContext::LinearFilteringBindOption | QGLContext::InvertedYBindOption);
glBindTexture(GL_TEXTURE_2D, myTex);
const GLfloat aspect = (GLfloat)myImg.width() / myImg.height();
auto drawRect = []() {
glBegin(GL_TRIANGLE_STRIP);
glTexCoord2f(0, 0); glVertex3f(-0.5, -0.5, 0);
glTexCoord2f(1, 0); glVertex3f(0.5, -0.5, 0);
glTexCoord2f(0, 1); glVertex3f(-0.5, 0.5, 0);
glTexCoord2f(1, 1); glVertex3f(0.5, 0.5, 0);
glEnd();
};
for (int i = 0; i < IMG_COUNT; ++i) { // rotating images
const ImgInfo &info = imgInfos[i];
glLoadIdentity();
glTranslatef(info.pos.x, info.pos.y, info.pos.z);
glRotatef(dt*info.yawVelocity, 0, 1, 0);
static const GLfloat imgWidth = 2; // 2 meters wide
glScalef(imgWidth, imgWidth / aspect, 1);
drawRect();
}
{ // ground
glLoadIdentity();
glTranslatef(0, 0, -SPACE_DEPTH / aspect*0.5);
glRotatef(-90, 1, 0, 0);
glScalef(SPACE_DEPTH, SPACE_DEPTH / aspect, 1);
drawRect();
}
}
// Keyboard handler. 'R' was intended to recenter head tracking, but the call
// is disabled; the handler is currently a no-op.
void OculusGLWidget::keyPressEvent(QKeyEvent *event) {
    if (event->key() == Qt::Key_R) {
        //ovr_ResetSensor(Hmd);
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment