Mirror of https://github.com/DrBeef/ioq3quest.git, synced 2024-11-10 06:41:58 +00:00

Commit 7b9d51ec30 (parent e7a2229edc): OpenXR renderer in progress
7 changed files with 762 additions and 70 deletions
@@ -28,6 +28,7 @@ add_dependencies(main copy_libs)

target_include_directories(main PRIVATE
        ${CMAKE_SOURCE_DIR}/code/OpenXR-SDK/include
        ${CMAKE_SOURCE_DIR}/code/OpenXR/Include
        ${CMAKE_SOURCE_DIR}/code/SDL2/include
        ${CMAKE_SOURCE_DIR}/code)
@@ -298,8 +298,6 @@ static void InitOpenGL( void )

    // set default state
    GL_SetDefaultState();

    VR_ReInitRenderer();
}

/*
android/app/src/main/cpp/code/vr/ovr_renderer.inl (555 lines, new normal file)
@@ -0,0 +1,555 @@
#include "vr_types.h"

/************************************************************************************

Original file name : XrCompositor_NativeActivity.c
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.

*************************************************************************************/

#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <string.h>
#include <math.h>
#include <time.h>
#include <unistd.h>
#include <pthread.h>
#include <sys/prctl.h>
#include <android/log.h>
#include <assert.h>

#if !defined(EGL_OPENGL_ES3_BIT_KHR)
#define EGL_OPENGL_ES3_BIT_KHR 0x0040
#endif

// EXT_texture_border_clamp
#ifndef GL_CLAMP_TO_BORDER
#define GL_CLAMP_TO_BORDER 0x812D
#endif

#ifndef GL_TEXTURE_BORDER_COLOR
#define GL_TEXTURE_BORDER_COLOR 0x1004
#endif

#if !defined(GL_EXT_multisampled_render_to_texture)
typedef void(GL_APIENTRY* PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC)(
    GLenum target,
    GLsizei samples,
    GLenum internalformat,
    GLsizei width,
    GLsizei height);
typedef void(GL_APIENTRY* PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEEXTPROC)(
    GLenum target,
    GLenum attachment,
    GLenum textarget,
    GLuint texture,
    GLint level,
    GLsizei samples);
#endif

// GL_EXT_texture_cube_map_array
#if !defined(GL_TEXTURE_CUBE_MAP_ARRAY)
#define GL_TEXTURE_CUBE_MAP_ARRAY 0x9009
#endif

#define MATH_PI 3.14159265358979323846f

#define GL(func) func;
#define OXR(func) func;
#define OVR_LOG_TAG "XrCompositor"

#define ALOGE(...) Com_Printf(__VA_ARGS__)
#define ALOGV(...) Com_Printf(__VA_ARGS__)
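Side note: in this commit GL() and OXR() are deliberate pass-throughs. In the upstream XrCompositor_NativeActivity sample these macros wrap error checking; a minimal sketch of a checked OXR variant is shown below. The helper name and the in-scope XrInstance are assumptions for illustration, not part of this change.

// Editorial sketch only: checked variant of the OXR() macro above.
static void OXR_CheckErrors(XrInstance instance, XrResult result, const char* function) {
    if (XR_FAILED(result)) {
        char errorBuffer[XR_MAX_RESULT_STRING_SIZE];
        xrResultToString(instance, result, errorBuffer);   // translate the XrResult to its name
        ALOGE("OpenXR error: %s: %s", function, errorBuffer);
    }
}
// A checked macro would then read: #define OXR(func) OXR_CheckErrors(instance, func, #func);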
static const int CPU_LEVEL = 2;
static const int GPU_LEVEL = 3;
static const int NUM_MULTI_SAMPLES = 4;

typedef union {
    XrCompositionLayerProjection Projection;
    XrCompositionLayerQuad Quad;
    XrCompositionLayerCylinderKHR Cylinder;
    XrCompositionLayerCubeKHR Cube;
    XrCompositionLayerEquirect2KHR Equirect2;
} ovrCompositorLayer_Union;

/*
================================================================================

 OpenGL-ES Utility Functions

================================================================================
*/
static const char* EglErrorString(const EGLint error) {
    switch (error) {
        case EGL_SUCCESS:
            return "EGL_SUCCESS";
        case EGL_NOT_INITIALIZED:
            return "EGL_NOT_INITIALIZED";
        case EGL_BAD_ACCESS:
            return "EGL_BAD_ACCESS";
        case EGL_BAD_ALLOC:
            return "EGL_BAD_ALLOC";
        case EGL_BAD_ATTRIBUTE:
            return "EGL_BAD_ATTRIBUTE";
        case EGL_BAD_CONTEXT:
            return "EGL_BAD_CONTEXT";
        case EGL_BAD_CONFIG:
            return "EGL_BAD_CONFIG";
        case EGL_BAD_CURRENT_SURFACE:
            return "EGL_BAD_CURRENT_SURFACE";
        case EGL_BAD_DISPLAY:
            return "EGL_BAD_DISPLAY";
        case EGL_BAD_SURFACE:
            return "EGL_BAD_SURFACE";
        case EGL_BAD_MATCH:
            return "EGL_BAD_MATCH";
        case EGL_BAD_PARAMETER:
            return "EGL_BAD_PARAMETER";
        case EGL_BAD_NATIVE_PIXMAP:
            return "EGL_BAD_NATIVE_PIXMAP";
        case EGL_BAD_NATIVE_WINDOW:
            return "EGL_BAD_NATIVE_WINDOW";
        case EGL_CONTEXT_LOST:
            return "EGL_CONTEXT_LOST";
        default:
            return "unknown";
    }
}

static const char* GlFrameBufferStatusString(GLenum status) {
    switch (status) {
        case GL_FRAMEBUFFER_UNDEFINED:
            return "GL_FRAMEBUFFER_UNDEFINED";
        case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT:
            return "GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT";
        case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:
            return "GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT";
        case GL_FRAMEBUFFER_UNSUPPORTED:
            return "GL_FRAMEBUFFER_UNSUPPORTED";
        case GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE:
            return "GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE";
        default:
            return "unknown";
    }
}

/*
================================================================================

 ovrFramebuffer

================================================================================
*/
static void ovrFramebuffer_Clear(ovrFramebuffer* frameBuffer) {
    frameBuffer->Width = 0;
    frameBuffer->Height = 0;
    frameBuffer->Multisamples = 0;
    frameBuffer->TextureSwapChainLength = 0;
    frameBuffer->TextureSwapChainIndex = 0;
    frameBuffer->ColorSwapChain.Handle = XR_NULL_HANDLE;
    frameBuffer->ColorSwapChain.Width = 0;
    frameBuffer->ColorSwapChain.Height = 0;
    frameBuffer->ColorSwapChainImage = NULL;
    frameBuffer->DepthBuffers = NULL;
    frameBuffer->FrameBuffers = NULL;
}
static GLboolean ovrFramebuffer_Create(
|
||||
XrSession session,
|
||||
ovrFramebuffer* frameBuffer,
|
||||
const GLenum colorFormat,
|
||||
const int width,
|
||||
const int height,
|
||||
const int multisamples) {
|
||||
PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC glRenderbufferStorageMultisampleEXT =
|
||||
(PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC)eglGetProcAddress(
|
||||
"glRenderbufferStorageMultisampleEXT");
|
||||
PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEEXTPROC glFramebufferTexture2DMultisampleEXT =
|
||||
(PFNGLFRAMEBUFFERTEXTURE2DMULTISAMPLEEXTPROC)eglGetProcAddress(
|
||||
"glFramebufferTexture2DMultisampleEXT");
|
||||
|
||||
frameBuffer->Width = width;
|
||||
frameBuffer->Height = height;
|
||||
frameBuffer->Multisamples = multisamples;
|
||||
|
||||
GLenum requestedGLFormat = colorFormat;
|
||||
|
||||
// Get the number of supported formats.
|
||||
uint32_t numInputFormats = 0;
|
||||
uint32_t numOutputFormats = 0;
|
||||
OXR(xrEnumerateSwapchainFormats(session, numInputFormats, &numOutputFormats, NULL));
|
||||
|
||||
// Allocate an array large enough to contain the supported formats.
|
||||
numInputFormats = numOutputFormats;
|
||||
int64_t* supportedFormats = (int64_t*)malloc(numOutputFormats * sizeof(int64_t));
|
||||
if (supportedFormats != NULL) {
|
||||
OXR(xrEnumerateSwapchainFormats(
|
||||
session, numInputFormats, &numOutputFormats, supportedFormats));
|
||||
}
|
||||
|
||||
// Verify the requested format is supported.
|
||||
uint64_t selectedFormat = 0;
|
||||
for (uint32_t i = 0; i < numOutputFormats; i++) {
|
||||
if (supportedFormats[i] == requestedGLFormat) {
|
||||
selectedFormat = supportedFormats[i];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
free(supportedFormats);
|
||||
|
||||
if (selectedFormat == 0) {
|
||||
ALOGE("Format not supported");
|
||||
}
|
||||
|
||||
    XrSwapchainCreateInfo swapChainCreateInfo;
    memset(&swapChainCreateInfo, 0, sizeof(swapChainCreateInfo));
    swapChainCreateInfo.type = XR_TYPE_SWAPCHAIN_CREATE_INFO;
    swapChainCreateInfo.usageFlags =
        XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT;
    swapChainCreateInfo.format = selectedFormat;
    swapChainCreateInfo.sampleCount = 1;
    swapChainCreateInfo.width = width;
    swapChainCreateInfo.height = height;
    swapChainCreateInfo.faceCount = 1;
    swapChainCreateInfo.arraySize = 1;
    swapChainCreateInfo.mipCount = 1;

    // Enable Foveation on this swapchain
    XrSwapchainCreateInfoFoveationFB swapChainFoveationCreateInfo;
    memset(&swapChainFoveationCreateInfo, 0, sizeof(swapChainFoveationCreateInfo));
    swapChainFoveationCreateInfo.type = XR_TYPE_SWAPCHAIN_CREATE_INFO_FOVEATION_FB;
    swapChainCreateInfo.next = &swapChainFoveationCreateInfo;

    frameBuffer->ColorSwapChain.Width = swapChainCreateInfo.width;
    frameBuffer->ColorSwapChain.Height = swapChainCreateInfo.height;

    // Create the swapchain.
    OXR(xrCreateSwapchain(session, &swapChainCreateInfo, &frameBuffer->ColorSwapChain.Handle));
    // Get the number of swapchain images.
    OXR(xrEnumerateSwapchainImages(
        frameBuffer->ColorSwapChain.Handle, 0, &frameBuffer->TextureSwapChainLength, NULL));
    // Allocate the swapchain images array.
    frameBuffer->ColorSwapChainImage = (XrSwapchainImageOpenGLESKHR*)malloc(
        frameBuffer->TextureSwapChainLength * sizeof(XrSwapchainImageOpenGLESKHR));

    // Populate the swapchain image array.
    for (uint32_t i = 0; i < frameBuffer->TextureSwapChainLength; i++) {
        frameBuffer->ColorSwapChainImage[i].type = XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_ES_KHR;
        frameBuffer->ColorSwapChainImage[i].next = NULL;
    }
    OXR(xrEnumerateSwapchainImages(
        frameBuffer->ColorSwapChain.Handle,
        frameBuffer->TextureSwapChainLength,
        &frameBuffer->TextureSwapChainLength,
        (XrSwapchainImageBaseHeader*)frameBuffer->ColorSwapChainImage));

    frameBuffer->DepthBuffers =
        (GLuint*)malloc(frameBuffer->TextureSwapChainLength * sizeof(GLuint));
    frameBuffer->FrameBuffers =
        (GLuint*)malloc(frameBuffer->TextureSwapChainLength * sizeof(GLuint));
    for (uint32_t i = 0; i < frameBuffer->TextureSwapChainLength; i++) {
        // Create the color buffer texture.
        const GLuint colorTexture = frameBuffer->ColorSwapChainImage[i].image;

        GLenum colorTextureTarget = GL_TEXTURE_2D;
        GL(glBindTexture(colorTextureTarget, colorTexture));
        GL(glTexParameteri(colorTextureTarget, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE));
        GL(glTexParameteri(colorTextureTarget, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE));
        GL(glTexParameteri(colorTextureTarget, GL_TEXTURE_MIN_FILTER, GL_LINEAR));
        GL(glTexParameteri(colorTextureTarget, GL_TEXTURE_MAG_FILTER, GL_LINEAR));
        GL(glBindTexture(colorTextureTarget, 0));

        if (multisamples > 1 && glRenderbufferStorageMultisampleEXT != NULL &&
            glFramebufferTexture2DMultisampleEXT != NULL) {
            // Create multisampled depth buffer.
            GL(glGenRenderbuffers(1, &frameBuffer->DepthBuffers[i]));
            GL(glBindRenderbuffer(GL_RENDERBUFFER, frameBuffer->DepthBuffers[i]));
            GL(glRenderbufferStorageMultisampleEXT(
                GL_RENDERBUFFER, multisamples, GL_DEPTH_COMPONENT24, width, height));
            GL(glBindRenderbuffer(GL_RENDERBUFFER, 0));

            // Create the frame buffer.
            // NOTE: glFramebufferTexture2DMultisampleEXT only works with GL_FRAMEBUFFER.
            GL(glGenFramebuffers(1, &frameBuffer->FrameBuffers[i]));
            GL(glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer->FrameBuffers[i]));
            GL(glFramebufferTexture2DMultisampleEXT(
                GL_FRAMEBUFFER,
                GL_COLOR_ATTACHMENT0,
                GL_TEXTURE_2D,
                colorTexture,
                0,
                multisamples));
            GL(glFramebufferRenderbuffer(
                GL_FRAMEBUFFER,
                GL_DEPTH_ATTACHMENT,
                GL_RENDERBUFFER,
                frameBuffer->DepthBuffers[i]));
            GL(GLenum renderFramebufferStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER));
            GL(glBindFramebuffer(GL_FRAMEBUFFER, 0));
            if (renderFramebufferStatus != GL_FRAMEBUFFER_COMPLETE) {
                ALOGE(
                    "Incomplete frame buffer object: %s",
                    GlFrameBufferStatusString(renderFramebufferStatus));
                return false;
            }
        } else {
            // Create depth buffer.
            GL(glGenRenderbuffers(1, &frameBuffer->DepthBuffers[i]));
            GL(glBindRenderbuffer(GL_RENDERBUFFER, frameBuffer->DepthBuffers[i]));
            GL(glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, width, height));
            GL(glBindRenderbuffer(GL_RENDERBUFFER, 0));

            // Create the frame buffer.
            GL(glGenFramebuffers(1, &frameBuffer->FrameBuffers[i]));
            GL(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, frameBuffer->FrameBuffers[i]));
            GL(glFramebufferRenderbuffer(
                GL_DRAW_FRAMEBUFFER,
                GL_DEPTH_ATTACHMENT,
                GL_RENDERBUFFER,
                frameBuffer->DepthBuffers[i]));
            GL(glFramebufferTexture2D(
                GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, colorTexture, 0));
            GL(GLenum renderFramebufferStatus = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER));
            GL(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0));
            if (renderFramebufferStatus != GL_FRAMEBUFFER_COMPLETE) {
                ALOGE(
                    "Incomplete frame buffer object: %s",
                    GlFrameBufferStatusString(renderFramebufferStatus));
                return false;
            }
        }
    }

    return true;
}
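Side note: the XrSwapchainCreateInfoFoveationFB chained above, and xrUpdateSwapchainFB used in ovrRenderer_SetFoveation below, come from the FB OpenXR foveation extensions, which a runtime only honours if they were requested when the XrInstance was created. A hedged sketch of the registry extension-name macros involved follows; the instance-creation code that would request them is not part of this diff, and the array name is illustrative only.

// Editorial sketch only: extension names assumed from the OpenXR registry.
static const char* const kFoveationExtensions[] = {
    XR_KHR_OPENGL_ES_ENABLE_EXTENSION_NAME,
    XR_FB_FOVEATION_EXTENSION_NAME,
    XR_FB_FOVEATION_CONFIGURATION_EXTENSION_NAME,
    XR_FB_SWAPCHAIN_UPDATE_STATE_EXTENSION_NAME,
};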
static void ovrFramebuffer_Destroy(ovrFramebuffer* frameBuffer) {
    GL(glDeleteFramebuffers(frameBuffer->TextureSwapChainLength, frameBuffer->FrameBuffers));
    GL(glDeleteRenderbuffers(frameBuffer->TextureSwapChainLength, frameBuffer->DepthBuffers));
    OXR(xrDestroySwapchain(frameBuffer->ColorSwapChain.Handle));
    free(frameBuffer->ColorSwapChainImage);

    free(frameBuffer->DepthBuffers);
    free(frameBuffer->FrameBuffers);

    ovrFramebuffer_Clear(frameBuffer);
}
static void ovrFramebuffer_SetCurrent(ovrFramebuffer* frameBuffer) {
    GL(glBindFramebuffer(
        GL_DRAW_FRAMEBUFFER, frameBuffer->FrameBuffers[frameBuffer->TextureSwapChainIndex]));
}

static void ovrFramebuffer_SetNone() {
    GL(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0));
}

static void ovrFramebuffer_Resolve(ovrFramebuffer* frameBuffer) {
    // Discard the depth buffer, so the tiler won't need to write it back out to memory.
    const GLenum depthAttachment[1] = {GL_DEPTH_ATTACHMENT};
    glInvalidateFramebuffer(GL_DRAW_FRAMEBUFFER, 1, depthAttachment);

    // We now let the resolve happen implicitly.
}
static void ovrFramebuffer_Acquire(ovrFramebuffer* frameBuffer) {
    // Acquire the swapchain image
    XrSwapchainImageAcquireInfo acquireInfo = {XR_TYPE_SWAPCHAIN_IMAGE_ACQUIRE_INFO, NULL};
    OXR(xrAcquireSwapchainImage(
        frameBuffer->ColorSwapChain.Handle, &acquireInfo, &frameBuffer->TextureSwapChainIndex));

    XrSwapchainImageWaitInfo waitInfo;
    waitInfo.type = XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO;
    waitInfo.next = NULL;
    waitInfo.timeout = 1000000000; /* timeout in nanoseconds */
    XrResult res = xrWaitSwapchainImage(frameBuffer->ColorSwapChain.Handle, &waitInfo);
    int i = 0;
    while (res == XR_TIMEOUT_EXPIRED) {
        res = xrWaitSwapchainImage(frameBuffer->ColorSwapChain.Handle, &waitInfo);
        i++;
        ALOGV(
            " Retry xrWaitSwapchainImage %d times due to XR_TIMEOUT_EXPIRED (duration %f seconds)",
            i,
            waitInfo.timeout * (1E-9));
    }
}

static void ovrFramebuffer_Release(ovrFramebuffer* frameBuffer) {
    XrSwapchainImageReleaseInfo releaseInfo = {XR_TYPE_SWAPCHAIN_IMAGE_RELEASE_INFO, NULL};
    OXR(xrReleaseSwapchainImage(frameBuffer->ColorSwapChain.Handle, &releaseInfo));
}
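For context, these helpers are called once per eye inside a single XR frame. The condensed sketch below is editorial, not part of this commit; it mirrors the call order VR_DrawFrame (further down in this diff) follows, and the function name is only illustrative.

// Editorial sketch of the per-eye call order used by VR_DrawFrame in this commit.
static void RenderEyeSketch(ovrFramebuffer* frameBuffer) {
    ovrFramebuffer_Acquire(frameBuffer);     // xrAcquireSwapchainImage + xrWaitSwapchainImage
    ovrFramebuffer_SetCurrent(frameBuffer);  // bind the FBO for the acquired swapchain index
    // ... issue the GL draw calls for this eye ...
    ovrFramebuffer_Resolve(frameBuffer);     // invalidate depth so the tiler skips the write-back
    ovrFramebuffer_Release(frameBuffer);     // xrReleaseSwapchainImage
    ovrFramebuffer_SetNone();                // leave no draw framebuffer bound
}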
/*
================================================================================

 ovrRenderer

================================================================================
*/

static void ovrRenderer_Clear(ovrRenderer* renderer) {
    for (int eye = 0; eye < XR_EYES_COUNT; eye++) {
        ovrFramebuffer_Clear(&renderer->FrameBuffer[eye]);
    }
}
static void ovrRenderer_Create(
    XrSession session,
    ovrRenderer* renderer,
    int suggestedEyeTextureWidth,
    int suggestedEyeTextureHeight) {
    // Create the frame buffers.
    for (int eye = 0; eye < XR_EYES_COUNT; eye++) {
        ovrFramebuffer_Create(
            session,
            &renderer->FrameBuffer[eye],
            GL_SRGB8_ALPHA8,
            suggestedEyeTextureWidth,
            suggestedEyeTextureHeight,
            NUM_MULTI_SAMPLES);
    }
}

static void ovrRenderer_Destroy(ovrRenderer* renderer) {
    for (int eye = 0; eye < XR_EYES_COUNT; eye++) {
        ovrFramebuffer_Destroy(&renderer->FrameBuffer[eye]);
    }
}
static void ovrRenderer_SetFoveation(
    XrInstance* instance,
    XrSession* session,
    ovrRenderer* renderer,
    XrFoveationLevelFB level,
    float verticalOffset,
    XrFoveationDynamicFB dynamic) {
    PFN_xrCreateFoveationProfileFB pfnCreateFoveationProfileFB;
    OXR(xrGetInstanceProcAddr(
        *instance,
        "xrCreateFoveationProfileFB",
        (PFN_xrVoidFunction*)(&pfnCreateFoveationProfileFB)));

    PFN_xrDestroyFoveationProfileFB pfnDestroyFoveationProfileFB;
    OXR(xrGetInstanceProcAddr(
        *instance,
        "xrDestroyFoveationProfileFB",
        (PFN_xrVoidFunction*)(&pfnDestroyFoveationProfileFB)));

    PFN_xrUpdateSwapchainFB pfnUpdateSwapchainFB;
    OXR(xrGetInstanceProcAddr(
        *instance, "xrUpdateSwapchainFB", (PFN_xrVoidFunction*)(&pfnUpdateSwapchainFB)));

    for (int eye = 0; eye < XR_EYES_COUNT; eye++) {
        XrFoveationLevelProfileCreateInfoFB levelProfileCreateInfo;
        memset(&levelProfileCreateInfo, 0, sizeof(levelProfileCreateInfo));
        levelProfileCreateInfo.type = XR_TYPE_FOVEATION_LEVEL_PROFILE_CREATE_INFO_FB;
        levelProfileCreateInfo.level = level;
        levelProfileCreateInfo.verticalOffset = verticalOffset;
        levelProfileCreateInfo.dynamic = dynamic;

        XrFoveationProfileCreateInfoFB profileCreateInfo;
        memset(&profileCreateInfo, 0, sizeof(profileCreateInfo));
        profileCreateInfo.type = XR_TYPE_FOVEATION_PROFILE_CREATE_INFO_FB;
        profileCreateInfo.next = &levelProfileCreateInfo;

        XrFoveationProfileFB foveationProfile;

        pfnCreateFoveationProfileFB(*session, &profileCreateInfo, &foveationProfile);

        XrSwapchainStateFoveationFB foveationUpdateState;
        memset(&foveationUpdateState, 0, sizeof(foveationUpdateState));
        foveationUpdateState.type = XR_TYPE_SWAPCHAIN_STATE_FOVEATION_FB;
        foveationUpdateState.profile = foveationProfile;

        pfnUpdateSwapchainFB(
            renderer->FrameBuffer[eye].ColorSwapChain.Handle,
            (XrSwapchainStateBaseHeaderFB*)(&foveationUpdateState));

        pfnDestroyFoveationProfileFB(foveationProfile);
    }
}
static inline ovrMatrix4f ovrMatrix4f_CreateProjection(
|
||||
const float minX,
|
||||
const float maxX,
|
||||
float const minY,
|
||||
const float maxY,
|
||||
const float nearZ,
|
||||
const float farZ) {
|
||||
const float width = maxX - minX;
|
||||
const float height = maxY - minY;
|
||||
const float offsetZ = nearZ; // set to zero for a [0,1] clip space
|
||||
|
||||
ovrMatrix4f out;
|
||||
if (farZ <= nearZ) {
|
||||
// place the far plane at infinity
|
||||
out.M[0][0] = 2 * nearZ / width;
|
||||
out.M[0][1] = 0;
|
||||
out.M[0][2] = (maxX + minX) / width;
|
||||
out.M[0][3] = 0;
|
||||
|
||||
out.M[1][0] = 0;
|
||||
out.M[1][1] = 2 * nearZ / height;
|
||||
out.M[1][2] = (maxY + minY) / height;
|
||||
out.M[1][3] = 0;
|
||||
|
||||
out.M[2][0] = 0;
|
||||
out.M[2][1] = 0;
|
||||
out.M[2][2] = -1;
|
||||
out.M[2][3] = -(nearZ + offsetZ);
|
||||
|
||||
out.M[3][0] = 0;
|
||||
out.M[3][1] = 0;
|
||||
out.M[3][2] = -1;
|
||||
out.M[3][3] = 0;
|
||||
} else {
|
||||
// normal projection
|
||||
out.M[0][0] = 2 * nearZ / width;
|
||||
out.M[0][1] = 0;
|
||||
out.M[0][2] = (maxX + minX) / width;
|
||||
out.M[0][3] = 0;
|
||||
|
||||
out.M[1][0] = 0;
|
||||
out.M[1][1] = 2 * nearZ / height;
|
||||
out.M[1][2] = (maxY + minY) / height;
|
||||
out.M[1][3] = 0;
|
||||
|
||||
out.M[2][0] = 0;
|
||||
out.M[2][1] = 0;
|
||||
out.M[2][2] = -(farZ + offsetZ) / (farZ - nearZ);
|
||||
out.M[2][3] = -(farZ * (nearZ + offsetZ)) / (farZ - nearZ);
|
||||
|
||||
out.M[3][0] = 0;
|
||||
out.M[3][1] = 0;
|
||||
out.M[3][2] = -1;
|
||||
out.M[3][3] = 0;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
static inline ovrMatrix4f ovrMatrix4f_CreateProjectionFov(
|
||||
const float fovDegreesX,
|
||||
const float fovDegreesY,
|
||||
const float offsetX,
|
||||
const float offsetY,
|
||||
const float nearZ,
|
||||
const float farZ) {
|
||||
const float halfWidth = nearZ * tanf(fovDegreesX * (M_PI / 180.0f * 0.5f));
|
||||
const float halfHeight = nearZ * tanf(fovDegreesY * (M_PI / 180.0f * 0.5f));
|
||||
|
||||
const float minX = offsetX - halfWidth;
|
||||
const float maxX = offsetX + halfWidth;
|
||||
|
||||
const float minY = offsetY - halfHeight;
|
||||
const float maxY = offsetY + halfHeight;
|
||||
|
||||
return ovrMatrix4f_CreateProjection(minX, maxX, minY, maxY, nearZ, farZ);
|
||||
}
|
|
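A usage note on the function above: with no weapon zoom, VR_DrawFrame (further down in this diff) effectively makes the call sketched below. Passing farZ = 0 with nearZ = 1 means farZ <= nearZ, so the infinite-far-plane branch of ovrMatrix4f_CreateProjection is taken; the variable name here is illustrative only.

// Editorial example of the call VR_DrawFrame makes in this commit (fov_x = fov_y = 90, zoom = 1).
const ovrMatrix4f exampleProjection =
    ovrMatrix4f_CreateProjectionFov(90.0f, 90.0f, 0.0f, 0.0f, 1.0f, 0.0f);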
@@ -259,7 +259,6 @@ void VR_EnterVR( engine_t* engine, ovrJava java ) {

    sessionCreateInfo.next = &graphicsBindingAndroidGLES;
    sessionCreateInfo.createFlags = 0;
    sessionCreateInfo.systemId = engine->systemId;

    if (xrCreateSession(engine->instance, &sessionCreateInfo, &engine->session) != XR_SUCCESS) {
        Com_Printf("xrCreateSession failed");
        exit(1);
@@ -4,6 +4,7 @@

#include "../qcommon/q_shared.h"
#include "../qcommon/qcommon.h"
#include "../client/client.h"
#include "ovr_renderer.inl"

#include "vr_clientinfo.h"
#include "vr_types.h"
@@ -23,6 +24,7 @@

extern vr_clientinfo_t vr;


void APIENTRY VR_GLDebugLog(GLenum source, GLenum type, GLuint id,
        GLenum severity, GLsizei length, const GLchar* message, const void* userParam)
{
@@ -87,64 +89,21 @@ void VR_InitRenderer( engine_t* engine ) {

    glDebugMessageCallback(VR_GLDebugLog, 0);
#endif

    //TODO:
    /*
    int eyeW, eyeH;
    VR_GetResolution(engine, &eyeW, &eyeH);
    ovrRenderer_Create(engine->session, &engine->renderer, eyeW, eyeH);

    for (int eye = 0; eye < VRAPI_FRAME_LAYER_EYE_MAX; ++eye) {
        framebuffer_t* framebuffer = &engine->framebuffers[eye];
        framebuffer->colorTexture = vrapi_CreateTextureSwapChain3(VRAPI_TEXTURE_TYPE_2D, GL_RGBA8,
            eyeW, eyeH, 1, 3);
        framebuffer->swapchainLength = vrapi_GetTextureSwapChainLength(framebuffer->colorTexture);
        framebuffer->depthBuffers = (GLuint*)malloc(framebuffer->swapchainLength * sizeof(GLuint));
        framebuffer->framebuffers = (GLuint*)malloc(framebuffer->swapchainLength * sizeof(GLuint));

        for (int index = 0; index < framebuffer->swapchainLength; ++index) {
            GLuint colorTexture;
            GLenum framebufferStatus;

            colorTexture = vrapi_GetTextureSwapChainHandle(framebuffer->colorTexture, index);
            glBindTexture(GL_TEXTURE_2D, colorTexture);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glBindTexture(GL_TEXTURE_2D, 0);

            glGenRenderbuffers(1, &framebuffer->depthBuffers[index]);
            glBindRenderbuffer(GL_RENDERBUFFER, framebuffer->depthBuffers[index]);
            glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, eyeW, eyeH);
            glBindRenderbuffer(GL_RENDERBUFFER, 0);

            glGenFramebuffers(1, &framebuffer->framebuffers[index]);
            glBindFramebuffer(GL_DRAW_FRAMEBUFFER, framebuffer->framebuffers[index]);
            glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER,
                framebuffer->depthBuffers[index]);
            glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, colorTexture, 0);
            framebufferStatus = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER);
            assert(framebufferStatus == GL_FRAMEBUFFER_COMPLETE);
            glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
        }
    }
    */
    XrReferenceSpaceCreateInfo spaceCreateInfo = {};
    spaceCreateInfo.type = XR_TYPE_REFERENCE_SPACE_CREATE_INFO;
    spaceCreateInfo.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_STAGE;
    spaceCreateInfo.poseInReferenceSpace.orientation.w = 1.0f;
    spaceCreateInfo.poseInReferenceSpace.position.y = 0.0f;
    xrCreateReferenceSpace(engine->session, &spaceCreateInfo, &engine->stageSpace);
}

void VR_DestroyRenderer( engine_t* engine ) {
    //TODO:
    /*
    for (int eye = 0; eye < VRAPI_FRAME_LAYER_EYE_MAX; ++eye)
    {
        if (engine->framebuffers[eye].swapchainLength > 0) {
            glDeleteFramebuffers(engine->framebuffers[eye].swapchainLength,
                engine->framebuffers[eye].depthBuffers);
            free(engine->framebuffers[eye].depthBuffers);
            free(engine->framebuffers[eye].framebuffers);

            vrapi_DestroyTextureSwapChain(engine->framebuffers[eye].colorTexture);

            memset(&engine->framebuffers[eye], 0, sizeof(engine->framebuffers[eye]));
        }
    }
    */
    xrDestroySpace(engine->stageSpace);
    ovrRenderer_Destroy(&engine->renderer);
}
@@ -238,9 +197,8 @@ ovrLayerCylinder2 BuildCylinderLayer(engine_t* engine, const int textureWidth, c

}
*/

void VR_ClearFrameBuffer( GLuint frameBuffer, int width, int height)
void VR_ClearFrameBuffer( int width, int height )
{
    glBindFramebuffer( GL_DRAW_FRAMEBUFFER, frameBuffer );

    glEnable( GL_SCISSOR_TEST );
    glViewport( 0, 0, width, height );
@@ -261,11 +219,165 @@ void VR_ClearFrameBuffer( GLuint frameBuffer, int width, int height)

    glScissor( 0, 0, 0, 0 );
    glDisable( GL_SCISSOR_TEST );

    glBindFramebuffer( GL_DRAW_FRAMEBUFFER, 0 );
}

void VR_DrawFrame( engine_t* engine ) {
    XrEventDataBuffer eventDataBuffer = {};

    // Poll for events
    for (;;) {
        XrEventDataBaseHeader* baseEventHeader = (XrEventDataBaseHeader*)(&eventDataBuffer);
        baseEventHeader->type = XR_TYPE_EVENT_DATA_BUFFER;
        baseEventHeader->next = NULL;
        if (xrPollEvent(engine->instance, &eventDataBuffer) != XR_SUCCESS) {
            break;
        }
        if (baseEventHeader->type == XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED) {
            const XrEventDataSessionStateChanged* session_state_changed_event =
                (XrEventDataSessionStateChanged*)(baseEventHeader);
            switch (session_state_changed_event->state) {
                case XR_SESSION_STATE_READY:
                    if (!engine->sessionActive) {
                        XrSessionBeginInfo sessionBeginInfo;
                        memset(&sessionBeginInfo, 0, sizeof(sessionBeginInfo));
                        sessionBeginInfo.type = XR_TYPE_SESSION_BEGIN_INFO;
                        sessionBeginInfo.next = NULL;
                        sessionBeginInfo.primaryViewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO;
                        if (xrBeginSession(engine->session, &sessionBeginInfo) != XR_SUCCESS) {
                            Com_Printf("xrBeginSession failed");
                            exit(1);
                        }
                        engine->sessionActive = GL_TRUE;
                    }
                    break;
                case XR_SESSION_STATE_STOPPING:
                    if (engine->sessionActive) {
                        xrEndSession(engine->session);
                        engine->sessionActive = GL_FALSE;
                    }
                    break;
            }
        }
    }

    if (!engine->sessionActive) {
        return;
    }
    // NOTE: OpenXR does not use the concept of frame indices. Instead,
    // XrWaitFrame returns the predicted display time.
    XrFrameWaitInfo waitFrameInfo = {};
    waitFrameInfo.type = XR_TYPE_FRAME_WAIT_INFO;
    waitFrameInfo.next = NULL;
    XrFrameState frameState = {};
    frameState.type = XR_TYPE_FRAME_STATE;
    frameState.next = NULL;
    xrWaitFrame(engine->session, &waitFrameInfo, &frameState);

    XrFrameBeginInfo beginFrameDesc = {};
    beginFrameDesc.type = XR_TYPE_FRAME_BEGIN_INFO;
    beginFrameDesc.next = NULL;
    xrBeginFrame(engine->session, &beginFrameDesc);

    float fov_y = 90; //TODO:
    float fov_x = 90; //TODO:

    if (vr.weapon_zoomed) {
        vr.weapon_zoomLevel += 0.05;
        if (vr.weapon_zoomLevel > 2.5f)
            vr.weapon_zoomLevel = 2.5f;
    }
    else {
        //Zoom back out quicker
        vr.weapon_zoomLevel -= 0.25f;
        if (vr.weapon_zoomLevel < 1.0f)
            vr.weapon_zoomLevel = 1.0f;
    }

    const ovrMatrix4f projectionMatrix = ovrMatrix4f_CreateProjectionFov(
        fov_x / vr.weapon_zoomLevel, fov_y / vr.weapon_zoomLevel, 0.0f, 0.0f, 1.0f, 0.0f );
    re.SetVRHeadsetParms(projectionMatrix.M,
        engine->renderer.FrameBuffer[0].FrameBuffers[engine->renderer.FrameBuffer[0].TextureSwapChainIndex],
        engine->renderer.FrameBuffer[1].FrameBuffers[engine->renderer.FrameBuffer[1].TextureSwapChainIndex]);

    for (int eye = 0; eye < XR_EYES_COUNT; eye++) {
        ovrFramebuffer* frameBuffer = &engine->renderer.FrameBuffer[eye];
        ovrFramebuffer_Acquire(frameBuffer);
        ovrFramebuffer_SetCurrent(frameBuffer);

        VR_ClearFrameBuffer(frameBuffer->Width, frameBuffer->Height);
        Com_Frame();

        ovrFramebuffer_Resolve(frameBuffer);
        ovrFramebuffer_Release(frameBuffer);
    }
    ovrFramebuffer_SetNone();
    // Compose the layers for this frame.
    XrCompositionLayerProjectionView projection_layer_elements[XR_EYES_COUNT] = {};
    XrCompositionLayerProjection projection_layer = {};
    projection_layer.type = XR_TYPE_COMPOSITION_LAYER_PROJECTION;
    projection_layer.layerFlags = XR_COMPOSITION_LAYER_BLEND_TEXTURE_SOURCE_ALPHA_BIT;
    projection_layer.layerFlags |= XR_COMPOSITION_LAYER_CORRECT_CHROMATIC_ABERRATION_BIT;
    projection_layer.space = engine->stageSpace;
    projection_layer.viewCount = XR_EYES_COUNT;
    projection_layer.views = projection_layer_elements;

    XrPosef viewTransform[2];
    ovrSceneMatrices sceneMatrices;
    XrView* projections = (XrView*)(malloc(XR_EYES_COUNT * sizeof(XrView)));
    for (int eye = 0; eye < XR_EYES_COUNT; eye++) {
        XrPosef xfHeadFromEye = projections[eye].pose;
        //XrPosef xfStageFromEye = XrPosef_Multiply(xfStageFromHead, xfHeadFromEye);
        viewTransform[eye] = XrPosef_Inverse(xfHeadFromEye); //TODO: there should be xfStageFromEye as parameter

        sceneMatrices.ViewMatrix[eye] =
            XrMatrix4x4f_CreateFromRigidTransform(&viewTransform[eye]);
        const XrFovf fov = projections[eye].fov;
        XrMatrix4x4f_CreateProjectionFov(
            &sceneMatrices.ProjectionMatrix[eye],
            fov.angleLeft,
            fov.angleRight,
            fov.angleUp,
            fov.angleDown,
            0.1f,
            0.0f);
    }

    for (int eye = 0; eye < XR_EYES_COUNT; eye++) {
        ovrFramebuffer* frameBuffer = &engine->renderer.FrameBuffer[eye];

        memset(&projection_layer_elements[eye], 0, sizeof(XrCompositionLayerProjectionView));
        projection_layer_elements[eye].type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW;

        projection_layer_elements[eye].pose = XrPosef_Inverse(viewTransform[eye]);
        projection_layer_elements[eye].fov = projections[eye].fov;

        memset(&projection_layer_elements[eye].subImage, 0, sizeof(XrSwapchainSubImage));
        projection_layer_elements[eye].subImage.swapchain = frameBuffer->ColorSwapChain.Handle;
        projection_layer_elements[eye].subImage.imageRect.offset.x = 0;
        projection_layer_elements[eye].subImage.imageRect.offset.y = 0;
        projection_layer_elements[eye].subImage.imageRect.extent.width =
            frameBuffer->ColorSwapChain.Width;
        projection_layer_elements[eye].subImage.imageRect.extent.height =
            frameBuffer->ColorSwapChain.Height;
        projection_layer_elements[eye].subImage.imageArrayIndex = 0;
    }

    // Compose the layers for this frame.
    const XrCompositionLayerBaseHeader* layers[1] = {};
    layers[0] = (const XrCompositionLayerBaseHeader*)&projection_layer;

    XrFrameEndInfo endFrameInfo = {};
    endFrameInfo.type = XR_TYPE_FRAME_END_INFO;
    endFrameInfo.displayTime = frameState.predictedDisplayTime;
    endFrameInfo.environmentBlendMode = XR_ENVIRONMENT_BLEND_MODE_OPAQUE;
    endFrameInfo.layerCount = 1;
    endFrameInfo.layers = layers;

    xrEndFrame(engine->session, &endFrameInfo);
    free(projections);

    //TODO
    /*
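Note on the hunk above: in this work-in-progress state the projections array is allocated but never filled before its pose and fov are read. For reference only, a hedged sketch follows of how a finished frame loop would typically populate it with xrLocateViews after xrWaitFrame; the snippet assumes the local variables of VR_DrawFrame and is not part of this commit.

    // Editorial sketch: locate the per-eye views for the predicted display time.
    XrViewLocateInfo viewLocateInfo = {};
    viewLocateInfo.type = XR_TYPE_VIEW_LOCATE_INFO;
    viewLocateInfo.viewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO;
    viewLocateInfo.displayTime = frameState.predictedDisplayTime;
    viewLocateInfo.space = engine->stageSpace;

    XrViewState viewState = {XR_TYPE_VIEW_STATE, NULL};
    uint32_t viewCount = 0;
    for (int eye = 0; eye < XR_EYES_COUNT; eye++) {
        projections[eye].type = XR_TYPE_VIEW;
        projections[eye].next = NULL;
    }
    xrLocateViews(engine->session, &viewLocateInfo, &viewState,
                  XR_EYES_COUNT, &viewCount, projections);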
@@ -10,13 +10,18 @@

#endif

//OpenXR
#define XR_EYES_COUNT 2
#define XR_USE_GRAPHICS_API_OPENGL_ES 1
#define XR_USE_PLATFORM_ANDROID 1
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES3/gl3.h>
#include <GLES3/gl3ext.h>
#include <jni.h>
#include <openxr/openxr.h>
#include <openxr/openxr_platform.h>
#include <openxr/openxr_oculus.h>
#include <openxr/openxr_oculus_helpers.h>

typedef struct {
    JavaVM* Vm;
@@ -25,22 +30,45 @@ typedef struct {

} ovrJava;

typedef struct {
    int swapchainLength;
    int swapchainIndex;
    //TODO:ovrTextureSwapChain* colorTexture;
    GLuint* depthBuffers;
    GLuint* framebuffers;
} framebuffer_t;
    XrSwapchain Handle;
    uint32_t Width;
    uint32_t Height;
} ovrSwapChain;

typedef struct {
    int Width;
    int Height;
    int Multisamples;
    uint32_t TextureSwapChainLength;
    uint32_t TextureSwapChainIndex;
    ovrSwapChain ColorSwapChain;
    XrSwapchainImageOpenGLESKHR* ColorSwapChainImage;
    GLuint* DepthBuffers;
    GLuint* FrameBuffers;
} ovrFramebuffer;

typedef struct {
    ovrFramebuffer FrameBuffer[XR_EYES_COUNT];
} ovrRenderer;

typedef struct {
    XrMatrix4x4f ViewMatrix[XR_EYES_COUNT];
    XrMatrix4x4f ProjectionMatrix[XR_EYES_COUNT];
} ovrSceneMatrices;

typedef struct ovrMatrix4f_ {
    float M[4][4];
} ovrMatrix4f;

typedef struct {
    uint64_t frameIndex;
    ovrJava java;
    double predictedDisplayTime;
    //TODO:ovrTracking2 tracking;
    framebuffer_t framebuffers[2];
    ovrRenderer renderer;
    XrInstance instance;
    XrSession session;
    XrSystemId systemId;
    XrSpace stageSpace;
    GLboolean sessionActive;
} engine_t;

typedef enum {
@@ -84,9 +84,8 @@ int main(int argc, char* argv[]) {

    Com_Init(args);
    NET_Init( );

    VR_InitRenderer(engine);

    VR_EnterVR(engine, java);
    VR_InitRenderer(engine);

    bool hasFocus = true;
    bool paused = false;