
Black screen with Oculus PC SDK using OpenGL and Qt

I am trying to get a basic test running with the Oculus. I am using the PC SDK 1.6, OpenGL and Qt.

I first started from https://developer.oculus.com/documentation/pcsdk/latest/concepts/dg-render/

Then I looked at various threads, but I cannot figure out what I did wrong, and above all where my mistakes are.

The initialization part seems to be correct: the Oculus shows the health warning, but all I see is a black screen once the health warning disappears.

Here is the code I am using:

OculusManager.h

#ifndef OCULUSMANAGER_H 
#define OCULUSMANAGER_H 

#include "main.h" 
#include <OVR_CAPI_GL.h> 
#include <QtGui/qopengl.h> 
#include <QOpenGLFunctions_4_3_Compatibility> 

class OculusManager : protected QOpenGLFunctions_4_3_Compatibility 
{ 
public: 
    OculusManager(); 
    bool initLibOVR(); 
    void displayLastError(); 
    void run(); 
    void shutdown(); 
    void destroy(); 

private: 

    bool mainLoop(bool value); 

    ovrSession session; 
    ovrGraphicsLuid luid; 
    bool isOVRLibLoaded = false; 
    ovrHmdDesc hmdDesc; 
    ovrSizei bufferSize; 
    ovrTextureSwapChain textureSwapChain; 
    ovrLayerEyeFov layer; 
    // Initialize VR structures, filling out description. 
    ovrEyeRenderDesc eyeRenderDesc[2]; 
    ovrVector3f  hmdToEyeViewOffset[2]; 
    ovrVector3f hmdToEyeOffset[2]; 
    double sensorSampleTime; 
    bool isVisible = true; 
    // it needs to be updated each new frame 
    long long frameIndex = 0; 
    // Initialize a default Pose 
    ovrPosef eyeRenderPose[2]; 

    bool end = false; 

    void openglError(); 
}; 

#endif // OCULUSMANAGER_H 

OculusManager.cpp

#include "oculusmanager.h" 


OculusManager::OculusManager() 
{ 
} 


bool OculusManager::initLibOVR(){ 

    if(initializeOpenGLFunctions() == false){ 
     qDebug() << "Impossible to load OpenGL functions"; 
     return false; 
    } 

    glEnable(GL_TEXTURE_2D); 
    glEnable(GL_DEPTH_TEST); 
    // Accept fragment if it closer to the camera than the former one 
    glDepthFunc(GL_LESS); 
    glEnable(GL_CULL_FACE); 

    glShadeModel(GL_SMOOTH);      // Enable Smooth Shading 
    glClearDepth(1.0f);       // Depth Buffer Setup 
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);   // Really Nice Perspective Calculations 


    //background color 
    glClearColor(0., 0., 0., 1.0); 


    if(OVR_FAILURE(ovr_Initialize(nullptr))) 
    { 
     return false; 
    } 


    if (OVR_FAILURE(ovr_Create(&session, &luid))) 
    { 
     return false; 
    } 

    //main code 

    hmdDesc = ovr_GetHmdDesc(session); 

    ovrSizei recommenedTex0Size = ovr_GetFovTextureSize(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0], 1.0f); 
    ovrSizei recommenedTex1Size = ovr_GetFovTextureSize(session, ovrEye_Right,hmdDesc.DefaultEyeFov[1], 1.0f); 

    bufferSize; 
    bufferSize.w = recommenedTex0Size.w + recommenedTex1Size.w; 
    bufferSize.h = std::max(recommenedTex0Size.h, recommenedTex1Size.h); 


    ovrTextureSwapChainDesc ovrTextdesc = {}; 
    ovrTextdesc.Type = ovrTexture_2D; 
    ovrTextdesc.ArraySize = 1; 
    ovrTextdesc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB; 
    ovrTextdesc.Width = bufferSize.w; 
    ovrTextdesc.Height = bufferSize.h; 
    ovrTextdesc.MipLevels = 1; 
    ovrTextdesc.SampleCount = 1; 
    ovrTextdesc.StaticImage = ovrFalse; 



    if(OVR_SUCCESS(ovr_CreateTextureSwapChainGL(session, &ovrTextdesc, &textureSwapChain))) 
    { 
     int length = 0; 
     if (OVR_FAILURE(ovr_GetTextureSwapChainLength(session, textureSwapChain, &length))){ 
      return false; 
     } 

     for (int i = 0; i < length; ++i) 
     { 
      GLuint chainTexId; 
      if (OVR_FAILURE(ovr_GetTextureSwapChainBufferGL(session, textureSwapChain, i, &chainTexId))){ 
       return false; 
      } 

      glBindTexture(GL_TEXTURE_2D, chainTexId); 

      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 

     } 


     // Generate frame buffer to render 
     GLuint fboID; 
     glGenFramebuffers(1, &fboID); 
     // Generate depth buffer of the frame buffer 
     GLuint depthBuffID; 
     glGenTextures(1, &depthBuffID); 
     glBindTexture(GL_TEXTURE_2D, depthBuffID); 
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 
     GLenum internalFormat = GL_DEPTH_COMPONENT24; 
     GLenum type = GL_UNSIGNED_INT; 
     glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, bufferSize.w, bufferSize.h, 0, GL_DEPTH_COMPONENT, type, NULL); 


     // FloorLevel will give tracking poses where the floor height is 0 
     ovr_SetTrackingOriginType(session, ovrTrackingOrigin_FloorLevel); 

     eyeRenderDesc[0] = ovr_GetRenderDesc(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]); 
     eyeRenderDesc[1] = ovr_GetRenderDesc(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]); 
     hmdToEyeViewOffset[0] = eyeRenderDesc[0].HmdToEyeOffset; 
     hmdToEyeViewOffset[1] = eyeRenderDesc[1].HmdToEyeOffset; 

     // Initialize our single full screen Fov layer. 

     layer.Header.Type  = ovrLayerType_EyeFov; 
     layer.Header.Flags  = 0; 
     layer.ColorTexture[0] = textureSwapChain; 
     layer.ColorTexture[1] = textureSwapChain; 
     layer.Fov[0]   = eyeRenderDesc[0].Fov; 
     layer.Fov[1]   = eyeRenderDesc[1].Fov; 

     ovrSizei rectsize; 
     rectsize.w = bufferSize.w/2; 
     rectsize.h = bufferSize.h; 

     ovrVector2i rectPosL; 
     rectPosL.x = 0; 
     rectPosL.y = 0; 

     ovrVector2i rectPosR; 
     rectPosR.x = bufferSize.w/2; 
     rectPosR.y = 0; 

     ovrRecti rectLeft, rectRight; 
     rectLeft.Pos = rectPosL; 
     rectLeft.Size = rectsize; 

     rectRight.Pos = rectPosR; 
     rectRight.Size = rectsize; 


     layer.Viewport[0] = rectLeft; 
     layer.Viewport[1] = rectRight; 
     // ld.RenderPose and ld.SensorSampleTime are updated later per frame. 

     isOVRLibLoaded = true; 

    } 


    return isOVRLibLoaded; 

} 


void OculusManager::run(){ 

    mainLoop(true); 
} 

bool OculusManager::mainLoop(bool value){ 


    // Get both eye poses simultaneously, with IPD offset already included. 
    double displayMidpointSeconds = ovr_GetPredictedDisplayTime(session, 0); 

    ovrTrackingState hmdState = ovr_GetTrackingState(session, displayMidpointSeconds, ovrTrue); 
    ovr_CalcEyePoses(hmdState.HeadPose.ThePose, hmdToEyeViewOffset, layer.RenderPose); 


    openglError(); 

    // qDebug() << hmdState.HeadPose.ThePose.Position.x << hmdState.HeadPose.ThePose.Position.y << hmdState.HeadPose.ThePose.Position.z; 

    // Get texture swap index where we must draw our frame 
    GLuint curTexId; 
    // Get next available index of the texture swap chain 
    int currentIndex = 0; 
    ovr_GetTextureSwapChainCurrentIndex(session, textureSwapChain, &currentIndex); 
    ovr_GetTextureSwapChainBufferGL(session, textureSwapChain, currentIndex, &curTexId); 


    // Call ovr_GetRenderDesc each frame to get the ovrEyeRenderDesc, as the returned values (e.g. HmdToEyeOffset) may change at runtime. 
    eyeRenderDesc[0] = ovr_GetRenderDesc(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]); 
    eyeRenderDesc[1] = ovr_GetRenderDesc(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]); 
    hmdToEyeOffset[0] = eyeRenderDesc[0].HmdToEyeOffset; 
    hmdToEyeOffset[1] = eyeRenderDesc[1].HmdToEyeOffset; 
    // Get eye poses, feeding in correct IPD offset 
    ovr_GetEyePoses(session, frameIndex, ovrTrue, hmdToEyeOffset, eyeRenderPose, &sensorSampleTime); 

    if (isVisible) 
    { 

     glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0); 
     glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, 0, 0); 
     // Clear the frame buffer 

     // Render Scene to Eye Buffers 
     for (int eye = 0; eye < 2; eye++) 
     { 

      // Set the left or right vertical half of the buffer as the viewport 
      glViewport(eye == ovrEye_Left ? 0 : bufferSize.w/2, 0, bufferSize.w/2, bufferSize.h); 
      glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_SHORT, 0); 

     } 

     // Do not forget to increment the frameIndex! 
     frameIndex++; 

     // Commit the changes to the texture swap chain 
     ovr_CommitTextureSwapChain(session, textureSwapChain); 
    } 


    layer.Header.Type = ovrLayerType_EyeFov; 
    // Tell to the Oculus compositor that our texture origin is at the bottom left 
    layer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft; // Because OpenGL | Disable head tracking 
    // Set the Oculus layer eye field of view for each view 
    for (int eye = 0; eye < 2; ++eye) 
    { 
     // Set the color texture as the current swap texture 
     layer.ColorTexture[eye] = textureSwapChain; 
     // Set the viewport as the right or left vertical half part of the color texture 
     ovrRecti rect; 
     rect.Pos.x = eye == ovrEye_Left ? 0 : bufferSize.w/2; 
     rect.Pos.y = 0; 
     rect.Size.h = bufferSize.h; 
     rect.Size.w = bufferSize.w/2; 
     layer.Viewport[eye] =rect; 
     // Set the field of view 
     layer.Fov[eye] = hmdDesc.DefaultEyeFov[eye]; 
     // Set the pose matrix 
     layer.RenderPose[eye] = eyeRenderPose[eye]; 
    } 

    layer.SensorSampleTime = sensorSampleTime; 

    // Submit frame with one layer we have. 
    ovrLayerHeader* layers = &layer.Header; 
    ovrResult  result = ovr_SubmitFrame(session, 0, nullptr, &layers, 1); 
    if(OVR_FAILURE(result)){ 
     qDebug() << "ovr_submitFrame failed"; 
    } 
    isVisible = (result == ovrSuccess); 

    //TODO if the session is lost, destroy everything here 

    // This is not really needed for this application but it may be useful for a more advanced application 
    ovrSessionStatus sessionStatus; 
    ovr_GetSessionStatus(session, &sessionStatus); 
    if (sessionStatus.ShouldRecenter) 
    { 
     qDebug() << "Recenter Tracking asked by Session" ; 
     ovr_RecenterTrackingOrigin(session); 
    } 

    return true; 

} 

void OculusManager::openglError(){ 
    GLenum error = glGetError(); 
    while(error !=0){ 
     qDebug() << "error gl : " << error; 
     error = glGetError(); 
    } 
} 

void OculusManager::displayLastError(){ 
    ovrErrorInfo errorInfo; 
    ovr_GetLastErrorInfo(&errorInfo); 
    qDebug() << "Oculus Manager error : " << errorInfo.ErrorString; 
} 

void OculusManager::shutdown(){ 
    ovr_Shutdown(); 
} 

void OculusManager::destroy(){ 
    ovr_Destroy(session); 
} 

Thanks in advance for your help.

Possible duplicates, but they did not help me:

Black Screen when rendering with Oculus SDK

Oculus 0.8 SDK Black Screen

Answer

glClearColor(0., 0., 0., 1.0);

You should try setting the clear color to something other than black, so you know whether the framebuffer is being presented at all. If you say "red" and the HMD is still black, then you know the framebuffer is not getting there. If the display is red but everything you render is missing, then you know the rendering code is wrong.
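For example, just swapping the existing clear color for something loud:

glClearColor(1.0f, 0.0f, 0.0f, 1.0f); // red: if the HMD still shows black, the buffer is not being presented at all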

Speaking of which, you never actually clear the framebuffer... your sample code does not contain glClear anywhere. You only have a comment

// Clear the frame buffer

but you never actually do it. So it is quite possible that your glDrawElements draws nothing, because the depth test fails for every fragment.
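A minimal sketch of the missing clear, assuming you keep the fboID created in initLibOVR around (in the posted code it is only a local variable) and bind it as the render target before drawing:

glBindFramebuffer(GL_FRAMEBUFFER, fboID);            // render into the offscreen framebuffer, not the default one
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);  // actually clear color and depth every frame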

Answer

If you have not solved this yet: you need to attach the textures you receive from ovr_GetTextureSwapChainBufferGL to your framebuffer with glFramebufferTexture2D. So you have to call glFramebufferTexture2D twice each time, once per texture ID (one for the left eye and one for the right). At the moment you are not drawing into the textures you received from that function, so they stay black, and that is what you get.
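With the posted setup (one shared swap-chain texture, one viewport half per eye) a rough per-frame sketch could look like the following — only a sketch, assuming fboID and depthBuffID from initLibOVR are kept as members and curTexId is the ID fetched via ovr_GetTextureSwapChainBufferGL in mainLoop:

// inside mainLoop(), after fetching curTexId for the current swap-chain index
glBindFramebuffer(GL_FRAMEBUFFER, fboID);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, curTexId, 0);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, depthBuffID, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

// ... draw both eye viewports here ...

glBindFramebuffer(GL_FRAMEBUFFER, 0);                  // unbind before handing the texture to the compositor
ovr_CommitTextureSwapChain(session, textureSwapChain); // commit the filled texture for this frame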

Hope that helps.