Merge pull request #69 from lvonasek/feature_openxr

OpenXR integration
This commit is contained in:
Simon 2022-05-01 10:20:40 +01:00 committed by GitHub
commit cb152467ba
15 changed files with 1811 additions and 466 deletions

.gitignore (vendored): 2 changes
View File

@@ -1,4 +1,6 @@
build
android/app/src/main/cpp/code/OpenXR-SDK
android/app/src/main/cpp/code/OpenXR
android/app/src/main/cpp/code/VrApi
*.swp
*tags

View File

@@ -281,7 +281,8 @@ LOKISETUPDIR=misc/setup
NSISDIR=misc/nsis
SDLHDIR=$(MOUNT_DIR)/SDL2
LIBSDIR=$(MOUNT_DIR)/libs
VRAPIDIR=$(MOUNT_DIR)/VrApi
OPENXRDIR=$(MOUNT_DIR)/OpenXR
OPENXRSDKDIR=$(MOUNT_DIR)/OpenXR-SDK
bin_path=$(shell which $(1) 2> /dev/null)
@@ -468,11 +469,11 @@ ifeq ($(PLATFORM),android)
RENDERER_LIBS += $(LIBSDIR)/android/arm64-v8a/libSDL2.so
CLIENT_EXTRA_FILES += $(LIBSDIR)/android/arm64-v8a/libSDL2.so
# VrApi
BASE_CFLAGS += -I$(VRAPIDIR)/Include
CLIENT_LIBS += $(VRAPIDIR)/Libs/Android/arm64-v8a/Release/libvrapi.so
RENDERER_LIBS += $(VRAPIDIR)/Libs/Android/arm64-v8a/Release/libvrapi.so
CLIENT_EXTRA_FILES += $(VRAPIDIR)/Libs/Android/arm64-v8a/Release/libvrapi.so
# OpenXR
BASE_CFLAGS += -I$(OPENXRDIR)/Include -I$(OPENXRSDKDIR)/include
CLIENT_LIBS += $(OPENXRDIR)/Libs/Android/arm64-v8a/Release/libopenxr_loader.so
RENDERER_LIBS += $(OPENXRDIR)/Libs/Android/arm64-v8a/Release/libopenxr_loader.so
CLIENT_EXTRA_FILES += $(OPENXRDIR)/Libs/Android/arm64-v8a/Release/libopenxr_loader.so
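# Note: libopenxr_loader.so ships with the Oculus OpenXR Mobile SDK download;
# the Khronos OpenXR-SDK checkout only supplies headers here (see the README steps).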
else # ifeq Android
#############################################################################
@@ -1819,6 +1820,7 @@ Q3OBJ = \
$(B)/client/vr_base.o \
$(B)/client/vr_input.o \
$(B)/client/vr_renderer.o \
$(B)/client/vr_types.o \
\
$(B)/client/con_log.o \
$(B)/client/sys_autoupdater.o \

View File

@@ -5,7 +5,9 @@
### Prerequisites
1. Install your copy of Quake III Arena from Steam.
2. Android Studio with NDK version 21.1.6352462.
3. Copy the Oculus VR SDK from https://developer.oculus.com/downloads/package/oculus-mobile-sdk/ and extract the VrApi folder to ./android/app/src/main/cpp/code/VrApi/
3. Download the Oculus OpenXR SDK from https://developer.oculus.com/downloads/package/oculus-openxr-mobile-sdk/
4. Extract the OpenXR folder to ./android/app/src/main/cpp/code/OpenXR/
5. Extract the 3rdParty/khronos/openxr/OpenXR-SDK folder to ./android/app/src/main/cpp/code/OpenXR-SDK/
### Building and running the build
The scripts assume that you installed everything in the default locations. In case you want to deviate from that, the paths are in ./android/run.(sh|bat) and in Makefile.local.

View File

@@ -15,7 +15,7 @@ endif()
add_custom_target(copy_libs
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/../../../../../build/${BUILD_FOLDER}/libioquake3_opengl2.so ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libioquake3_opengl2.so
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/../../../../../build/${BUILD_FOLDER}/libSDL2.so ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libSDL2.so
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/../../../../../build/${BUILD_FOLDER}/libvrapi.so ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libvrapi.so
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/../../../../../build/${BUILD_FOLDER}/libopenxr_loader.so ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libopenxr_loader.so
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/../../../../../build/${BUILD_FOLDER}/baseq3/cgameaarch64.so ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libcgameaarch64_baseq3.so
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/../../../../../build/${BUILD_FOLDER}/baseq3/qagameaarch64.so ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libqagameaarch64_baseq3.so
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/../../../../../build/${BUILD_FOLDER}/baseq3/uiaarch64.so ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libuiaarch64_baseq3.so
@@ -27,7 +27,8 @@ add_custom_target(copy_libs
add_dependencies(main copy_libs)
target_include_directories(main PRIVATE
${CMAKE_SOURCE_DIR}/code/VrApi/Include
${CMAKE_SOURCE_DIR}/code/OpenXR-SDK/include
${CMAKE_SOURCE_DIR}/code/OpenXR/Include
${CMAKE_SOURCE_DIR}/code/SDL2/include
${CMAKE_SOURCE_DIR}/code)
@@ -40,5 +41,5 @@ target_link_libraries(main
android
ioquake3_opengl2
SDL2
vrapi
openxr_loader
${log-lib})

View File

@@ -24,13 +24,6 @@ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#include "tr_types.h"
#if __ANDROID__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wstrict-prototypes"
#include <VrApi.h>
#pragma clang diagnostic pop
#endif
#define REF_API_VERSION 8
//
@@ -92,8 +85,9 @@ typedef struct {
void (*EndFrame)( int *frontEndMsec, int *backEndMsec );
#if __ANDROID__
void (*SetVRHeadsetParms)( const ovrMatrix4f *projectionMatrix, const ovrMatrix4f *nonVRProjectionMatrix,
int renderBuffer );
void (*SetVRHeadsetParms)( const float projectionMatrix[4][4],
const float nonVRProjectionMatrix[4][4],
int renderBuffer );
#endif
int (*MarkFragments)( int numPoints, const vec3_t *points, const vec3_t projection,

View File

@@ -513,11 +513,10 @@ void R_Mat4Transpose( const float in[4][4], float* out ) {
}
}
void RE_SetVRHeadsetParms( const ovrMatrix4f *projectionMatrix, const ovrMatrix4f *nonVRProjectionMatrix,
void RE_SetVRHeadsetParms( const float projectionMatrix[4][4], const float nonVRProjectionMatrix[4][4],
int renderBuffer ) {
R_Mat4Transpose(projectionMatrix->M, tr.vrParms.projection);
R_Mat4Transpose(nonVRProjectionMatrix->M, tr.vrParms.monoVRProjection);
R_Mat4Transpose(projectionMatrix, tr.vrParms.projection);
R_Mat4Transpose(nonVRProjectionMatrix, tr.vrParms.monoVRProjection);
tr.vrParms.renderBuffer = renderBuffer;
tr.vrParms.valid = qtrue;
}

View File

@@ -2535,8 +2535,9 @@ void RE_StretchPic ( float x, float y, float w, float h,
void RE_BeginFrame( stereoFrame_t stereoFrame );
void RE_EndFrame( int *frontEndMsec, int *backEndMsec );
#if __ANDROID__
void RE_SetVRHeadsetParms( const ovrMatrix4f *projectionMatrix, const ovrMatrix4f *nonVRProjectionMatrix,
int renderBuffer );
void RE_SetVRHeadsetParms( const float projectionMatrix[4][4],
const float nonVRProjectionMatrix[4][4],
int renderBuffer );
#endif
void RE_HUDBufferStart( qboolean clear );
void RE_HUDBufferEnd( void );

View File

@@ -1,22 +1,24 @@
#include "vr_base.h"
#include "../VrApi/Include/VrApi.h"
#include "../VrApi/Include/VrApi_Helpers.h"
#include "../VrApi/Include/VrApi_Types.h"
#include "../qcommon/q_shared.h"
#include "../qcommon/qcommon.h"
#include "../client/client.h"
//#if __ANDROID__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wstrict-prototypes"
#pragma clang diagnostic pop
#include <EGL/egl.h>
#include <assert.h>
#include <unistd.h>
static engine_t vr_engine;
qboolean vr_initialized = qfalse;
const char* const requiredExtensionNames[] = {
XR_KHR_OPENGL_ES_ENABLE_EXTENSION_NAME,
XR_EXT_PERFORMANCE_SETTINGS_EXTENSION_NAME,
XR_KHR_ANDROID_THREAD_SETTINGS_EXTENSION_NAME,
XR_KHR_COMPOSITION_LAYER_CYLINDER_EXTENSION_NAME,
XR_FB_DISPLAY_REFRESH_RATE_EXTENSION_NAME};
const uint32_t numRequiredExtensions =
sizeof(requiredExtensionNames) / sizeof(requiredExtensionNames[0]);
cvar_t *vr_worldscale = NULL;
cvar_t *vr_worldscaleScaler = NULL;
@@ -46,18 +48,92 @@ cvar_t *vr_showConsoleMessages = NULL;
engine_t* VR_Init( ovrJava java )
{
ovrInitParms initParams;
ovrResult initResult;
if (vr_initialized)
return &vr_engine;
memset(&vr_engine, 0, sizeof(vr_engine));
ovrApp_Clear(&vr_engine.appState);
initParams = vrapi_DefaultInitParms(&java);
initResult = vrapi_Initialize(&initParams);
assert(initResult == VRAPI_INITIALIZE_SUCCESS);
vr_engine.java = java;
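// On Android the OpenXR loader must be told about the JavaVM and Activity before
// any other XR call; the entry point is resolved dynamically because it belongs
// to the XR_KHR_loader_init_android extension and may be absent elsewhere.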
PFN_xrInitializeLoaderKHR xrInitializeLoaderKHR;
xrGetInstanceProcAddr(
XR_NULL_HANDLE, "xrInitializeLoaderKHR", (PFN_xrVoidFunction*)&xrInitializeLoaderKHR);
if (xrInitializeLoaderKHR != NULL) {
XrLoaderInitInfoAndroidKHR loaderInitializeInfoAndroid;
memset(&loaderInitializeInfoAndroid, 0, sizeof(loaderInitializeInfoAndroid));
loaderInitializeInfoAndroid.type = XR_TYPE_LOADER_INIT_INFO_ANDROID_KHR;
loaderInitializeInfoAndroid.next = NULL;
loaderInitializeInfoAndroid.applicationVM = java.Vm;
loaderInitializeInfoAndroid.applicationContext = java.ActivityObject;
xrInitializeLoaderKHR((XrLoaderInitInfoBaseHeaderKHR*)&loaderInitializeInfoAndroid);
}
return &vr_engine;
// Create the OpenXR instance.
XrApplicationInfo appInfo;
memset(&appInfo, 0, sizeof(appInfo));
strcpy(appInfo.applicationName, "Quake 3 Arena");
appInfo.applicationVersion = 0;
strcpy(appInfo.engineName, "Quake 3 Arena");
appInfo.engineVersion = 0;
appInfo.apiVersion = XR_CURRENT_API_VERSION;
XrInstanceCreateInfo instanceCreateInfo;
memset(&instanceCreateInfo, 0, sizeof(instanceCreateInfo));
instanceCreateInfo.type = XR_TYPE_INSTANCE_CREATE_INFO;
instanceCreateInfo.next = NULL;
instanceCreateInfo.createFlags = 0;
instanceCreateInfo.applicationInfo = appInfo;
instanceCreateInfo.enabledApiLayerCount = 0;
instanceCreateInfo.enabledApiLayerNames = NULL;
instanceCreateInfo.enabledExtensionCount = numRequiredExtensions;
instanceCreateInfo.enabledExtensionNames = requiredExtensionNames;
XrResult initResult;
OXR(initResult = xrCreateInstance(&instanceCreateInfo, &vr_engine.appState.Instance));
if (initResult != XR_SUCCESS) {
ALOGE("Failed to create XR instance: %d.", initResult);
exit(1);
}
XrInstanceProperties instanceInfo;
instanceInfo.type = XR_TYPE_INSTANCE_PROPERTIES;
instanceInfo.next = NULL;
OXR(xrGetInstanceProperties(vr_engine.appState.Instance, &instanceInfo));
ALOGV(
"Runtime %s: Version : %u.%u.%u",
instanceInfo.runtimeName,
XR_VERSION_MAJOR(instanceInfo.runtimeVersion),
XR_VERSION_MINOR(instanceInfo.runtimeVersion),
XR_VERSION_PATCH(instanceInfo.runtimeVersion));
XrSystemGetInfo systemGetInfo;
memset(&systemGetInfo, 0, sizeof(systemGetInfo));
systemGetInfo.type = XR_TYPE_SYSTEM_GET_INFO;
systemGetInfo.next = NULL;
systemGetInfo.formFactor = XR_FORM_FACTOR_HEAD_MOUNTED_DISPLAY;
XrSystemId systemId;
OXR(initResult = xrGetSystem(vr_engine.appState.Instance, &systemGetInfo, &systemId));
if (initResult != XR_SUCCESS) {
ALOGE("Failed to get system.");
exit(1);
}
// Get the graphics requirements.
PFN_xrGetOpenGLESGraphicsRequirementsKHR pfnGetOpenGLESGraphicsRequirementsKHR = NULL;
OXR(xrGetInstanceProcAddr(
vr_engine.appState.Instance,
"xrGetOpenGLESGraphicsRequirementsKHR",
(PFN_xrVoidFunction*)(&pfnGetOpenGLESGraphicsRequirementsKHR)));
XrGraphicsRequirementsOpenGLESKHR graphicsRequirements = {};
graphicsRequirements.type = XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_ES_KHR;
OXR(pfnGetOpenGLESGraphicsRequirementsKHR(vr_engine.appState.Instance, systemId, &graphicsRequirements));
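// The OpenXR spec requires querying graphics requirements before creating a session;
// skipping this makes xrCreateSession fail with XR_ERROR_GRAPHICS_REQUIREMENTS_CALL_MISSING.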
vr_engine.appState.MainThreadTid = gettid();
vr_engine.appState.SystemId = systemId;
vr_engine.java = java;
vr_initialized = qtrue;
return &vr_engine;
}
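A note on the fixed extension list above: xrCreateInstance fails outright if any requested extension is unsupported by the runtime. A minimal defensive check, run after the loader init above, could look like this (a sketch, not part of this commit):

/* Sketch: check one extension name against the runtime before building
 * the enabledExtensionNames list passed to xrCreateInstance. */
#include <openxr/openxr.h>
#include <stdlib.h>
#include <string.h>

static int VR_ExtensionSupported(const char* name) {
    uint32_t count = 0;
    if (XR_FAILED(xrEnumerateInstanceExtensionProperties(NULL, 0, &count, NULL)) || count == 0)
        return 0;
    // calloc zeroes the structs, so every .next pointer starts out NULL.
    XrExtensionProperties* props = calloc(count, sizeof(XrExtensionProperties));
    for (uint32_t i = 0; i < count; i++)
        props[i].type = XR_TYPE_EXTENSION_PROPERTIES;
    xrEnumerateInstanceExtensionProperties(NULL, count, &count, props);
    int found = 0;
    for (uint32_t i = 0; i < count; i++)
        if (strcmp(props[i].extensionName, name) == 0) { found = 1; break; }
    free(props);
    return found;
}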
void VR_InitCvars( void )
@@ -184,53 +260,77 @@ void VR_InitCvars( void )
void VR_Destroy( engine_t* engine )
{
if (engine == &vr_engine) {
vrapi_Shutdown();
}
if (engine == &vr_engine) {
xrDestroyInstance(engine->appState.Instance);
ovrApp_Destroy(&engine->appState);
}
}
void VR_EnterVR( engine_t* engine, ovrJava java ) {
if (!engine->ovr) {
ovrModeParms modeParams = vrapi_DefaultModeParms(&java);
modeParams.Display = (size_t)eglGetCurrentDisplay();
modeParams.WindowSurface = (size_t)eglGetCurrentSurface(EGL_DRAW);
modeParams.ShareContext = (size_t)eglGetCurrentContext();
engine->ovr = vrapi_EnterVrMode(&modeParams);
engine->frameIndex = 0;
if (engine->appState.Session) {
Com_Printf("VR_EnterVR called with existing session");
return;
}
vrapi_SetTrackingSpace(engine->ovr, VRAPI_TRACKING_SPACE_LOCAL_FLOOR);
// Create the OpenXR Session.
XrGraphicsBindingOpenGLESAndroidKHR graphicsBindingAndroidGLES = {};
graphicsBindingAndroidGLES.type = XR_TYPE_GRAPHICS_BINDING_OPENGL_ES_ANDROID_KHR;
graphicsBindingAndroidGLES.next = NULL;
graphicsBindingAndroidGLES.display = eglGetCurrentDisplay();
graphicsBindingAndroidGLES.config = eglGetCurrentSurface(EGL_DRAW);
graphicsBindingAndroidGLES.context = eglGetCurrentContext();
vrapi_SetClockLevels(engine->ovr, 4, 4);
}
XrSessionCreateInfo sessionCreateInfo = {};
memset(&sessionCreateInfo, 0, sizeof(sessionCreateInfo));
sessionCreateInfo.type = XR_TYPE_SESSION_CREATE_INFO;
sessionCreateInfo.next = &graphicsBindingAndroidGLES;
sessionCreateInfo.createFlags = 0;
sessionCreateInfo.systemId = engine->appState.SystemId;
XrResult initResult;
OXR(initResult = xrCreateSession(engine->appState.Instance, &sessionCreateInfo, &engine->appState.Session));
if (initResult != XR_SUCCESS) {
ALOGE("Failed to create XR session: %d.", initResult);
exit(1);
}
}
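VR_EnterVR only creates the session; in OpenXR, rendering may begin only after the runtime reports XR_SESSION_STATE_READY through the event queue (the xrPollEvent pump is not shown in this hunk). A minimal handler, sketched under the same engine_t/OXR conventions used above:

/* Sketch, not part of this commit: begin/end the session on state transitions. */
static void VR_HandleSessionState(engine_t* engine, XrSessionState state) {
    if (state == XR_SESSION_STATE_READY) {
        XrSessionBeginInfo beginInfo;
        memset(&beginInfo, 0, sizeof(beginInfo));
        beginInfo.type = XR_TYPE_SESSION_BEGIN_INFO;
        beginInfo.primaryViewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO;
        OXR(xrBeginSession(engine->appState.Session, &beginInfo));
    } else if (state == XR_SESSION_STATE_STOPPING) {
        OXR(xrEndSession(engine->appState.Session));
    }
}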
void VR_LeaveVR( engine_t* engine ) {
if (engine->ovr) {
vrapi_LeaveVrMode(engine->ovr);
engine->ovr = NULL;
}
if (engine->appState.Session) {
OXR(xrDestroySpace(engine->appState.HeadSpace));
OXR(xrDestroySpace(engine->appState.LocalSpace));
// StageSpace is optional.
if (engine->appState.StageSpace != XR_NULL_HANDLE) {
OXR(xrDestroySpace(engine->appState.StageSpace));
}
OXR(xrDestroySpace(engine->appState.FakeStageSpace));
engine->appState.CurrentSpace = XR_NULL_HANDLE;
OXR(xrDestroySession(engine->appState.Session));
OXR(xrDestroyInstance(engine->appState.Instance));
engine->appState.Session = NULL;
}
}
engine_t* VR_GetEngine( void ) {
return &vr_engine;
}
bool VR_isPauseable( void )
int VR_isPauseable( void )
{
return (bool)( ( clc.state == CA_ACTIVE) && !Cvar_VariableValue ("cl_paused") );
return ( clc.state == CA_ACTIVE) && !Cvar_VariableValue ("cl_paused");
}
bool VR_useScreenLayer( void )
int VR_useScreenLayer( void )
{
//intermission is never full screen
if ( cl.snap.ps.pm_type == PM_INTERMISSION )
{
return qfalse;
return 0;
}
int keyCatcher = Key_GetCatcher( );
return (bool)( clc.state == CA_CINEMATIC ||
return ( clc.state == CA_CINEMATIC ||
( keyCatcher & (KEYCATCH_UI | KEYCATCH_CONSOLE) ));
}
//#endif

View File

@@ -12,8 +12,8 @@ void VR_EnterVR( engine_t* engine, ovrJava java );
void VR_LeaveVR( engine_t* engine );
engine_t* VR_GetEngine( void );
bool VR_isPauseable( void );
bool VR_useScreenLayer( void );
int VR_useScreenLayer( void );
int VR_isPauseable( void );
float radians(float deg);

View File

@@ -7,8 +7,6 @@
#include "../client/keycodes.h"
#include "../client/client.h"
#include "vr_base.h"
#include "../VrApi/Include/VrApi_Input.h"
#include "../VrApi/Include/VrApi_Helpers.h"
#include "vr_clientinfo.h"
#include <unistd.h>
@@ -20,6 +18,39 @@
# include <SDL.h>
#endif
//OpenXR
XrPath leftHandPath;
XrPath rightHandPath;
XrAction aimPoseAction;
XrAction gripPoseAction;
XrAction indexLeftAction;
XrAction indexRightAction;
XrAction menuAction;
XrAction buttonAAction;
XrAction buttonBAction;
XrAction buttonXAction;
XrAction buttonYAction;
XrAction gripLeftAction;
XrAction gripRightAction;
XrAction moveOnLeftJoystickAction;
XrAction moveOnRightJoystickAction;
XrAction thumbstickLeftClickAction;
XrAction thumbstickRightClickAction;
XrAction vibrateLeftFeedback;
XrAction vibrateRightFeedback;
XrActionSet runningActionSet;
XrSpace leftControllerAimSpace = XR_NULL_HANDLE;
XrSpace rightControllerAimSpace = XR_NULL_HANDLE;
XrSpace leftControllerGripSpace = XR_NULL_HANDLE;
XrSpace rightControllerGripSpace = XR_NULL_HANDLE;
qboolean inputInitialized = qfalse;
qboolean useSimpleProfile = qfalse;
typedef struct {
XrSpaceLocation loc;
XrSpaceVelocity vel;
} LocVel;
enum {
VR_TOUCH_AXIS_UP = 1 << 0,
VR_TOUCH_AXIS_UPRIGHT = 1 << 1,
@@ -92,7 +123,7 @@ void rotateAboutOrigin(float x, float y, float rotation, vec2_t out)
out[1] = cosf(DEG2RAD(-rotation)) * y - sinf(DEG2RAD(-rotation)) * x;
}
static ovrVector3f normalizeVec(ovrVector3f vec) {
XrVector3f normalizeVec(XrVector3f vec) {
//NOTE: leave w-component untouched
//@@const float EPSILON = 0.000001f;
float xxyyzz = vec.x*vec.x + vec.y*vec.y + vec.z*vec.z;
@@ -100,7 +131,7 @@ static ovrVector3f normalizeVec(ovrVector3f vec) {
//@@ return *this; // do nothing if it is zero vector
//float invLength = invSqrt(xxyyzz);
ovrVector3f result;
XrVector3f result;
float invLength = 1.0f / sqrtf(xxyyzz);
result.x = vec.x * invLength;
result.y = vec.y * invLength;
@@ -123,7 +154,7 @@ void NormalizeAngles(vec3_t angles)
while (angles[2] < -180) angles[2] += 360;
}
void GetAnglesFromVectors(const ovrVector3f forward, const ovrVector3f right, const ovrVector3f up, vec3_t angles)
void GetAnglesFromVectors(const XrVector3f forward, const XrVector3f right, const XrVector3f up, vec3_t angles)
{
float sr, sp, sy, cr, cp, cy;
@@ -172,7 +203,7 @@ void GetAnglesFromVectors(const ovrVector3f forward, const ovrVector3f right, co
NormalizeAngles(angles);
}
void QuatToYawPitchRoll(ovrQuatf q, vec3_t rotation, vec3_t out) {
void QuatToYawPitchRoll(XrQuaternionf q, vec3_t rotation, vec3_t out) {
ovrMatrix4f mat = ovrMatrix4f_CreateFromQuaternion( &q );
@@ -182,21 +213,21 @@ void QuatToYawPitchRoll(ovrQuatf q, vec3_t rotation, vec3_t out) {
mat = ovrMatrix4f_Multiply(&mat, &rot);
}
ovrVector4f v1 = {0, 0, -1, 0};
ovrVector4f v2 = {1, 0, 0, 0};
ovrVector4f v3 = {0, 1, 0, 0};
XrVector4f v1 = {0, 0, -1, 0};
XrVector4f v2 = {1, 0, 0, 0};
XrVector4f v3 = {0, 1, 0, 0};
ovrVector4f forwardInVRSpace = ovrVector4f_MultiplyMatrix4f(&mat, &v1);
ovrVector4f rightInVRSpace = ovrVector4f_MultiplyMatrix4f(&mat, &v2);
ovrVector4f upInVRSpace = ovrVector4f_MultiplyMatrix4f(&mat, &v3);
XrVector4f forwardInVRSpace = XrVector4f_MultiplyMatrix4f(&mat, &v1);
XrVector4f rightInVRSpace = XrVector4f_MultiplyMatrix4f(&mat, &v2);
XrVector4f upInVRSpace = XrVector4f_MultiplyMatrix4f(&mat, &v3);
ovrVector3f forward = {-forwardInVRSpace.z, -forwardInVRSpace.x, forwardInVRSpace.y};
ovrVector3f right = {-rightInVRSpace.z, -rightInVRSpace.x, rightInVRSpace.y};
ovrVector3f up = {-upInVRSpace.z, -upInVRSpace.x, upInVRSpace.y};
XrVector3f forward = {-forwardInVRSpace.z, -forwardInVRSpace.x, forwardInVRSpace.y};
XrVector3f right = {-rightInVRSpace.z, -rightInVRSpace.x, rightInVRSpace.y};
XrVector3f up = {-upInVRSpace.z, -upInVRSpace.x, upInVRSpace.y};
ovrVector3f forwardNormal = normalizeVec(forward);
ovrVector3f rightNormal = normalizeVec(right);
ovrVector3f upNormal = normalizeVec(up);
XrVector3f forwardNormal = normalizeVec(forward);
XrVector3f rightNormal = normalizeVec(right);
XrVector3f upNormal = normalizeVec(up);
GetAnglesFromVectors(forwardNormal, rightNormal, upNormal, out);
}
@@ -204,7 +235,6 @@ void QuatToYawPitchRoll(ovrQuatf q, vec3_t rotation, vec3_t out) {
//0 = left, 1 = right
float vibration_channel_duration[2] = {0.0f, 0.0f};
float vibration_channel_intensity[2] = {0.0f, 0.0f};
ovrDeviceID controllerIDs[2];
void VR_Vibrate( int duration, int chan, float intensity )
{
@@ -235,8 +265,19 @@ static void VR_processHaptics() {
for (int i = 0; i < 2; ++i) {
if (vibration_channel_duration[i] > 0.0f ||
vibration_channel_duration[i] == -1.0f) {
vrapi_SetHapticVibrationSimple(VR_GetEngine()->ovr, controllerIDs[i],
vibration_channel_intensity[i]);
// fire haptics using output action
XrHapticVibration vibration = {};
vibration.type = XR_TYPE_HAPTIC_VIBRATION;
vibration.next = NULL;
vibration.amplitude = vibration_channel_intensity[i];
vibration.duration = ToXrTime(vibration_channel_duration[i]);
vibration.frequency = 3000;
XrHapticActionInfo hapticActionInfo = {};
hapticActionInfo.type = XR_TYPE_HAPTIC_ACTION_INFO;
hapticActionInfo.next = NULL;
hapticActionInfo.action = i == 0 ? vibrateLeftFeedback : vibrateRightFeedback;
OXR(xrApplyHapticFeedback(VR_GetEngine()->appState.Session, &hapticActionInfo, (const XrHapticBaseHeader*)&vibration));
if (vibration_channel_duration[i] != -1.0f) {
vibration_channel_duration[i] -= frametime;
@@ -247,7 +288,12 @@ }
}
}
} else {
vrapi_SetHapticVibrationSimple(VR_GetEngine()->ovr, controllerIDs[i], 0.0f);
// Stop haptics
XrHapticActionInfo hapticActionInfo = {};
hapticActionInfo.type = XR_TYPE_HAPTIC_ACTION_INFO;
hapticActionInfo.next = NULL;
hapticActionInfo.action = i == 0 ? vibrateLeftFeedback : vibrateRightFeedback;
OXR(xrStopHapticFeedback(VR_GetEngine()->appState.Session, &hapticActionInfo));
}
}
}
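For reference, a hypothetical caller of the queue that VR_processHaptics drains; channel 0 is the left controller and 1 the right, matching the arrays above:

// Hypothetical usage, not from this diff: short pulse on the right controller.
VR_Vibrate(100, 1, 0.7f);
// Per the checks above, a duration of -1.0f vibrates until explicitly stopped.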
@@ -474,50 +520,370 @@ void VR_HapticEvent(const char* event, int position, int flags, int intensity, f
}
}
XrSpace CreateActionSpace(XrAction poseAction, XrPath subactionPath) {
XrActionSpaceCreateInfo asci = {};
asci.type = XR_TYPE_ACTION_SPACE_CREATE_INFO;
asci.action = poseAction;
asci.poseInActionSpace.orientation.w = 1.0f;
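// Identity pose: the action space tracks the controller's pose exactly, with no offset.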
asci.subactionPath = subactionPath;
XrSpace actionSpace = XR_NULL_HANDLE;
OXR(xrCreateActionSpace(VR_GetEngine()->appState.Session, &asci, &actionSpace));
return actionSpace;
}
XrActionSuggestedBinding ActionSuggestedBinding(XrAction action, const char* bindingString) {
XrActionSuggestedBinding asb;
asb.action = action;
XrPath bindingPath;
OXR(xrStringToPath(VR_GetEngine()->appState.Instance, bindingString, &bindingPath));
asb.binding = bindingPath;
return asb;
}
XrActionSet CreateActionSet(int priority, const char* name, const char* localizedName) {
XrActionSetCreateInfo asci = {};
asci.type = XR_TYPE_ACTION_SET_CREATE_INFO;
asci.next = NULL;
asci.priority = priority;
strcpy(asci.actionSetName, name);
strcpy(asci.localizedActionSetName, localizedName);
XrActionSet actionSet = XR_NULL_HANDLE;
OXR(xrCreateActionSet(VR_GetEngine()->appState.Instance, &asci, &actionSet));
return actionSet;
}
XrAction CreateAction(
XrActionSet actionSet,
XrActionType type,
const char* actionName,
const char* localizedName,
int countSubactionPaths,
XrPath* subactionPaths) {
ALOGV("CreateAction %s, %" PRIi32, actionName, countSubactionPaths);
XrActionCreateInfo aci = {};
aci.type = XR_TYPE_ACTION_CREATE_INFO;
aci.next = NULL;
aci.actionType = type;
if (countSubactionPaths > 0) {
aci.countSubactionPaths = countSubactionPaths;
aci.subactionPaths = subactionPaths;
}
strcpy(aci.actionName, actionName);
strcpy(aci.localizedActionName, localizedName ? localizedName : actionName);
XrAction action = XR_NULL_HANDLE;
OXR(xrCreateAction(actionSet, &aci, &action));
return action;
}
qboolean ActionPoseIsActive(XrAction action, XrPath subactionPath) {
XrActionStateGetInfo getInfo = {};
getInfo.type = XR_TYPE_ACTION_STATE_GET_INFO;
getInfo.action = action;
getInfo.subactionPath = subactionPath;
XrActionStatePose state = {};
state.type = XR_TYPE_ACTION_STATE_POSE;
OXR(xrGetActionStatePose(VR_GetEngine()->appState.Session, &getInfo, &state));
return state.isActive != XR_FALSE;
}
LocVel GetSpaceLocVel(XrSpace space, XrTime time) {
LocVel lv = {{}};
lv.loc.type = XR_TYPE_SPACE_LOCATION;
lv.loc.next = &lv.vel;
lv.vel.type = XR_TYPE_SPACE_VELOCITY;
OXR(xrLocateSpace(space, VR_GetEngine()->appState.CurrentSpace, time, &lv.loc));
lv.loc.next = NULL; // pointer no longer valid or necessary
return lv;
}
XrActionStateFloat GetActionStateFloat(XrAction action) {
XrActionStateGetInfo getInfo = {};
getInfo.type = XR_TYPE_ACTION_STATE_GET_INFO;
getInfo.action = action;
XrActionStateFloat state = {};
state.type = XR_TYPE_ACTION_STATE_FLOAT;
OXR(xrGetActionStateFloat(VR_GetEngine()->appState.Session, &getInfo, &state));
return state;
}
XrActionStateBoolean GetActionStateBoolean(XrAction action) {
XrActionStateGetInfo getInfo = {};
getInfo.type = XR_TYPE_ACTION_STATE_GET_INFO;
getInfo.action = action;
XrActionStateBoolean state = {};
state.type = XR_TYPE_ACTION_STATE_BOOLEAN;
OXR(xrGetActionStateBoolean(VR_GetEngine()->appState.Session, &getInfo, &state));
return state;
}
XrActionStateVector2f GetActionStateVector2(XrAction action) {
XrActionStateGetInfo getInfo = {};
getInfo.type = XR_TYPE_ACTION_STATE_GET_INFO;
getInfo.action = action;
XrActionStateVector2f state = {};
state.type = XR_TYPE_ACTION_STATE_VECTOR2F;
OXR(xrGetActionStateVector2f(VR_GetEngine()->appState.Session, &getInfo, &state));
return state;
}
void IN_VRInit( void )
{
if (inputInitialized)
return;
memset(&vr, 0, sizeof(vr));
engine_t *engine = VR_GetEngine();
callbackClass = (*engine->java.Env)->GetObjectClass(engine->java.Env, engine->java.ActivityObject);
android_haptic_event = (*engine->java.Env)->GetMethodID(engine->java.Env, callbackClass, "haptic_event","(Ljava/lang/String;IIIFF)V");
// Actions
runningActionSet = CreateActionSet(1, "running_action_set", "Action Set used on main loop");
indexLeftAction = CreateAction(runningActionSet, XR_ACTION_TYPE_BOOLEAN_INPUT, "index_left", "Index left", 0, NULL);
indexRightAction = CreateAction(runningActionSet, XR_ACTION_TYPE_BOOLEAN_INPUT, "index_right", "Index right", 0, NULL);
menuAction = CreateAction(runningActionSet, XR_ACTION_TYPE_BOOLEAN_INPUT, "menu_action", "Menu", 0, NULL);
buttonAAction = CreateAction(runningActionSet, XR_ACTION_TYPE_BOOLEAN_INPUT, "button_a", "Button A", 0, NULL);
buttonBAction = CreateAction(runningActionSet, XR_ACTION_TYPE_BOOLEAN_INPUT, "button_b", "Button B", 0, NULL);
buttonXAction = CreateAction(runningActionSet, XR_ACTION_TYPE_BOOLEAN_INPUT, "button_x", "Button X", 0, NULL);
buttonYAction = CreateAction(runningActionSet, XR_ACTION_TYPE_BOOLEAN_INPUT, "button_y", "Button Y", 0, NULL);
gripLeftAction = CreateAction(runningActionSet, XR_ACTION_TYPE_FLOAT_INPUT, "grip_left", "Grip left", 0, NULL);
gripRightAction = CreateAction(runningActionSet, XR_ACTION_TYPE_FLOAT_INPUT, "grip_right", "Grip right", 0, NULL);
moveOnLeftJoystickAction = CreateAction(runningActionSet, XR_ACTION_TYPE_VECTOR2F_INPUT, "move_on_left_joy", "Move on left Joy", 0, NULL);
moveOnRightJoystickAction = CreateAction(runningActionSet, XR_ACTION_TYPE_VECTOR2F_INPUT, "move_on_right_joy", "Move on right Joy", 0, NULL);
thumbstickLeftClickAction = CreateAction(runningActionSet, XR_ACTION_TYPE_BOOLEAN_INPUT, "thumbstick_left", "Thumbstick left", 0, NULL);
thumbstickRightClickAction = CreateAction(runningActionSet, XR_ACTION_TYPE_BOOLEAN_INPUT, "thumbstick_right", "Thumbstick right", 0, NULL);
vibrateLeftFeedback = CreateAction(runningActionSet, XR_ACTION_TYPE_VIBRATION_OUTPUT, "vibrate_left_feedback", "Vibrate Left Controller Feedback", 0, NULL);
vibrateRightFeedback = CreateAction(runningActionSet, XR_ACTION_TYPE_VIBRATION_OUTPUT, "vibrate_right_feedback", "Vibrate Right Controller Feedback", 0, NULL);
OXR(xrStringToPath(engine->appState.Instance, "/user/hand/left", &leftHandPath));
OXR(xrStringToPath(engine->appState.Instance, "/user/hand/right", &rightHandPath));
XrPath handSubactionPaths[2] = {leftHandPath, rightHandPath};
aimPoseAction = CreateAction(runningActionSet, XR_ACTION_TYPE_POSE_INPUT, "aim_pose", NULL, 2, &handSubactionPaths[0]);
gripPoseAction = CreateAction(runningActionSet, XR_ACTION_TYPE_POSE_INPUT, "grip_pose", NULL, 2, &handSubactionPaths[0]);
XrPath interactionProfilePath = XR_NULL_PATH;
XrPath interactionProfilePathTouch = XR_NULL_PATH;
XrPath interactionProfilePathKHRSimple = XR_NULL_PATH;
OXR(xrStringToPath(engine->appState.Instance, "/interaction_profiles/oculus/touch_controller", &interactionProfilePathTouch));
OXR(xrStringToPath(engine->appState.Instance, "/interaction_profiles/khr/simple_controller", &interactionProfilePathKHRSimple));
// Toggle this to force simple as a first choice, otherwise use it as a last resort
if (useSimpleProfile) {
ALOGV("xrSuggestInteractionProfileBindings found bindings for Khronos SIMPLE controller");
interactionProfilePath = interactionProfilePathKHRSimple;
} else {
// Query Set
XrActionSet queryActionSet = CreateActionSet(1, "query_action_set", "Action Set used to query device caps");
XrAction dummyAction = CreateAction(queryActionSet, XR_ACTION_TYPE_BOOLEAN_INPUT, "dummy_action", "Dummy Action", 0, NULL);
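// Probe trick: suggesting a throwaway binding reveals whether the runtime accepts
// the touch profile at all; unknown profiles fail with XR_ERROR_PATH_UNSUPPORTED.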
// Map bindings
XrActionSuggestedBinding bindings[1];
int currBinding = 0;
bindings[currBinding++] = ActionSuggestedBinding(dummyAction, "/user/hand/right/input/system/click");
XrInteractionProfileSuggestedBinding suggestedBindings = {};
suggestedBindings.type = XR_TYPE_INTERACTION_PROFILE_SUGGESTED_BINDING;
suggestedBindings.next = NULL;
suggestedBindings.suggestedBindings = bindings;
suggestedBindings.countSuggestedBindings = currBinding;
// Try all
suggestedBindings.interactionProfile = interactionProfilePathTouch;
XrResult suggestTouchResult = xrSuggestInteractionProfileBindings(engine->appState.Instance, &suggestedBindings);
OXR(suggestTouchResult);
if (XR_SUCCESS == suggestTouchResult) {
ALOGV("xrSuggestInteractionProfileBindings found bindings for QUEST controller");
interactionProfilePath = interactionProfilePathTouch;
}
if (interactionProfilePath == XR_NULL_PATH) {
// Simple as a fallback
bindings[0] = ActionSuggestedBinding(dummyAction, "/user/hand/right/input/select/click");
suggestedBindings.interactionProfile = interactionProfilePathKHRSimple;
XrResult suggestKHRSimpleResult = xrSuggestInteractionProfileBindings(engine->appState.Instance, &suggestedBindings);
OXR(suggestKHRSimpleResult);
if (XR_SUCCESS == suggestKHRSimpleResult) {
ALOGV("xrSuggestInteractionProfileBindings found bindings for Khronos SIMPLE controller");
interactionProfilePath = interactionProfilePathKHRSimple;
} else {
ALOGE("xrSuggestInteractionProfileBindings did NOT find any bindings.");
assert(qfalse);
}
}
}
// Action creation
{
// Map bindings
XrActionSuggestedBinding bindings[32]; // large enough for all profiles
int currBinding = 0;
{
if (interactionProfilePath == interactionProfilePathTouch) {
bindings[currBinding++] = ActionSuggestedBinding(indexLeftAction, "/user/hand/left/input/trigger");
bindings[currBinding++] = ActionSuggestedBinding(indexRightAction, "/user/hand/right/input/trigger");
bindings[currBinding++] = ActionSuggestedBinding(menuAction, "/user/hand/left/input/menu/click");
bindings[currBinding++] = ActionSuggestedBinding(buttonXAction, "/user/hand/left/input/x/click");
bindings[currBinding++] = ActionSuggestedBinding(buttonYAction, "/user/hand/left/input/y/click");
bindings[currBinding++] = ActionSuggestedBinding(buttonAAction, "/user/hand/right/input/a/click");
bindings[currBinding++] = ActionSuggestedBinding(buttonBAction, "/user/hand/right/input/b/click");
bindings[currBinding++] = ActionSuggestedBinding(gripLeftAction, "/user/hand/left/input/squeeze/value");
bindings[currBinding++] = ActionSuggestedBinding(gripRightAction, "/user/hand/right/input/squeeze/value");
bindings[currBinding++] = ActionSuggestedBinding(moveOnLeftJoystickAction, "/user/hand/left/input/thumbstick");
bindings[currBinding++] = ActionSuggestedBinding(moveOnRightJoystickAction, "/user/hand/right/input/thumbstick");
bindings[currBinding++] = ActionSuggestedBinding(thumbstickLeftClickAction, "/user/hand/left/input/thumbstick/click");
bindings[currBinding++] = ActionSuggestedBinding(thumbstickRightClickAction, "/user/hand/right/input/thumbstick/click");
bindings[currBinding++] = ActionSuggestedBinding(vibrateLeftFeedback, "/user/hand/left/output/haptic");
bindings[currBinding++] = ActionSuggestedBinding(vibrateRightFeedback, "/user/hand/right/output/haptic");
bindings[currBinding++] = ActionSuggestedBinding(aimPoseAction, "/user/hand/left/input/aim/pose");
bindings[currBinding++] = ActionSuggestedBinding(aimPoseAction, "/user/hand/right/input/aim/pose");
bindings[currBinding++] = ActionSuggestedBinding(gripPoseAction, "/user/hand/left/input/grip/pose");
bindings[currBinding++] = ActionSuggestedBinding(gripPoseAction, "/user/hand/right/input/grip/pose");
}
if (interactionProfilePath == interactionProfilePathKHRSimple) {
bindings[currBinding++] = ActionSuggestedBinding(indexLeftAction, "/user/hand/left/input/select/click");
bindings[currBinding++] = ActionSuggestedBinding(indexRightAction, "/user/hand/right/input/select/click");
bindings[currBinding++] = ActionSuggestedBinding(buttonAAction, "/user/hand/left/input/menu/click");
bindings[currBinding++] = ActionSuggestedBinding(buttonXAction, "/user/hand/right/input/menu/click");
bindings[currBinding++] = ActionSuggestedBinding(vibrateLeftFeedback, "/user/hand/left/output/haptic");
bindings[currBinding++] = ActionSuggestedBinding(vibrateRightFeedback, "/user/hand/right/output/haptic");
bindings[currBinding++] = ActionSuggestedBinding(aimPoseAction, "/user/hand/left/input/aim/pose");
bindings[currBinding++] = ActionSuggestedBinding(aimPoseAction, "/user/hand/right/input/aim/pose");
bindings[currBinding++] = ActionSuggestedBinding(gripPoseAction, "/user/hand/left/input/grip/pose");
bindings[currBinding++] = ActionSuggestedBinding(gripPoseAction, "/user/hand/right/input/grip/pose");
}
}
XrInteractionProfileSuggestedBinding suggestedBindings = {};
suggestedBindings.type = XR_TYPE_INTERACTION_PROFILE_SUGGESTED_BINDING;
suggestedBindings.next = NULL;
suggestedBindings.interactionProfile = interactionProfilePath;
suggestedBindings.suggestedBindings = bindings;
suggestedBindings.countSuggestedBindings = currBinding;
OXR(xrSuggestInteractionProfileBindings(engine->appState.Instance, &suggestedBindings));
// Enumerate actions
XrPath actionPathsBuffer[32];
char stringBuffer[256];
XrAction actionsToEnumerate[] = {
indexLeftAction,
indexRightAction,
menuAction,
buttonAAction,
buttonBAction,
buttonXAction,
buttonYAction,
gripLeftAction,
gripRightAction,
moveOnLeftJoystickAction,
moveOnRightJoystickAction,
thumbstickLeftClickAction,
thumbstickRightClickAction,
vibrateLeftFeedback,
vibrateRightFeedback,
aimPoseAction,
gripPoseAction,
};
for (size_t i = 0; i < sizeof(actionsToEnumerate) / sizeof(actionsToEnumerate[0]); ++i) {
XrBoundSourcesForActionEnumerateInfo enumerateInfo = {};
enumerateInfo.type = XR_TYPE_BOUND_SOURCES_FOR_ACTION_ENUMERATE_INFO;
enumerateInfo.next = NULL;
enumerateInfo.action = actionsToEnumerate[i];
// Get Count
uint32_t countOutput = 0;
OXR(xrEnumerateBoundSourcesForAction(
engine->appState.Session, &enumerateInfo, 0 /* request size */, &countOutput, NULL));
ALOGV(
"xrEnumerateBoundSourcesForAction action=%lld count=%u",
(long long)enumerateInfo.action,
countOutput);
if (countOutput < 32) {
OXR(xrEnumerateBoundSourcesForAction(
engine->appState.Session, &enumerateInfo, 32, &countOutput, actionPathsBuffer));
for (uint32_t a = 0; a < countOutput; ++a) {
XrInputSourceLocalizedNameGetInfo nameGetInfo = {};
nameGetInfo.type = XR_TYPE_INPUT_SOURCE_LOCALIZED_NAME_GET_INFO;
nameGetInfo.next = NULL;
nameGetInfo.sourcePath = actionPathsBuffer[a];
nameGetInfo.whichComponents = XR_INPUT_SOURCE_LOCALIZED_NAME_USER_PATH_BIT |
XR_INPUT_SOURCE_LOCALIZED_NAME_INTERACTION_PROFILE_BIT |
XR_INPUT_SOURCE_LOCALIZED_NAME_COMPONENT_BIT;
uint32_t stringCount = 0u;
OXR(xrGetInputSourceLocalizedName(
engine->appState.Session, &nameGetInfo, 0, &stringCount, NULL));
if (stringCount < 256) {
OXR(xrGetInputSourceLocalizedName(
engine->appState.Session, &nameGetInfo, 256, &stringCount, stringBuffer));
char pathStr[256];
uint32_t strLen = 0;
OXR(xrPathToString(
engine->appState.Instance,
actionPathsBuffer[a],
(uint32_t)sizeof(pathStr),
&strLen,
pathStr));
ALOGV(
" -> path = %lld `%s` -> `%s`",
(long long)actionPathsBuffer[a],
pathStr,
stringBuffer);
}
}
}
}
}
inputInitialized = qtrue;
}
static void IN_VRController( qboolean isRightController, ovrTracking remoteTracking )
static void IN_VRController( qboolean isRightController, XrPosef pose )
{
//Set gun angles - We need to calculate all those we might need (including adjustments) for the client to then take its pick
vec3_t rotation = {0};
if (isRightController == (vr_righthanded->integer != 0))
{
//Set gun angles - We need to calculate all those we might need (including adjustments) for the client to then take its pick
vec3_t rotation = {0};
rotation[PITCH] = vr_weaponPitch->value;
QuatToYawPitchRoll(remoteTracking.HeadPose.Pose.Orientation, rotation, vr.weaponangles);
VectorSubtract(vr.weaponangles_last, vr.weaponangles, vr.weaponangles_delta);
VectorCopy(vr.weaponangles, vr.weaponangles_last);
///Weapon location relative to view
vr.weaponposition[0] = remoteTracking.HeadPose.Pose.Position.x;
vr.weaponposition[1] = remoteTracking.HeadPose.Pose.Position.y + vr_heightAdjust->value;
vr.weaponposition[2] = remoteTracking.HeadPose.Pose.Position.z;
VectorCopy(vr.weaponoffset_last[1], vr.weaponoffset_last[0]);
VectorCopy(vr.weaponoffset, vr.weaponoffset_last[1]);
VectorSubtract(vr.weaponposition, vr.hmdposition, vr.weaponoffset);
} else {
vec3_t rotation = {0};
QuatToYawPitchRoll(remoteTracking.HeadPose.Pose.Orientation, rotation, vr.offhandangles2); // used for off-hand direction mode
{
//Set gun angles - We need to calculate all those we might need (including adjustments) for the client to then take its pick
rotation[PITCH] = vr_weaponPitch->value;
QuatToYawPitchRoll(remoteTracking.HeadPose.Pose.Orientation, rotation, vr.offhandangles);
QuatToYawPitchRoll(pose.orientation, rotation, vr.weaponangles);
VectorSubtract(vr.weaponangles_last, vr.weaponangles, vr.weaponangles_delta);
VectorCopy(vr.weaponangles, vr.weaponangles_last);
///Weapon location relative to view
vr.weaponposition[0] = pose.position.x;
vr.weaponposition[1] = pose.position.y + vr_heightAdjust->value;
vr.weaponposition[2] = pose.position.z;
VectorCopy(vr.weaponoffset_last[1], vr.weaponoffset_last[0]);
VectorCopy(vr.weaponoffset, vr.weaponoffset_last[1]);
VectorSubtract(vr.weaponposition, vr.hmdposition, vr.weaponoffset);
} else {
QuatToYawPitchRoll(pose.orientation, rotation, vr.offhandangles2); // used for off-hand direction mode
rotation[PITCH] = vr_weaponPitch->value;
QuatToYawPitchRoll(pose.orientation, rotation, vr.offhandangles);
///location relative to view
vr.offhandposition[0] = remoteTracking.HeadPose.Pose.Position.x;
vr.offhandposition[1] = remoteTracking.HeadPose.Pose.Position.y + vr_heightAdjust->value;
vr.offhandposition[2] = remoteTracking.HeadPose.Pose.Position.z;
vr.weaponposition[0] = pose.position.x;
vr.weaponposition[1] = pose.position.y + vr_heightAdjust->value;
vr.weaponposition[2] = pose.position.z;
VectorCopy(vr.offhandoffset_last[1], vr.offhandoffset_last[0]);
VectorCopy(vr.offhandoffset, vr.offhandoffset_last[1]);
VectorSubtract(vr.offhandposition, vr.hmdposition, vr.offhandoffset);
}
}
if (vr.virtual_screen || cl.snap.ps.pm_type == PM_INTERMISSION)
{
@@ -737,7 +1103,8 @@ static void IN_VRJoystick( qboolean isRightController, float joystickX, float jo
else
{
//Positional movement speed correction for when we are not hitting target framerate
int refresh = vrapi_GetSystemPropertyInt(&(VR_GetEngine()->java), VRAPI_SYS_PROP_DISPLAY_REFRESH_RATE);
float refresh;
VR_GetEngine()->appState.pfnGetDisplayRefreshRate(VR_GetEngine()->appState.Session, &refresh);
float multiplier = (float)((1000.0 / refresh) / (in_vrEventTime - lastframetime));
float factor = (refresh / 72.0F) * 10.0f; // adjust positional factor based on refresh rate
@@ -935,7 +1302,6 @@ static void IN_VRButtons( qboolean isRightController, uint32_t buttons )
} else {
IN_HandleInactiveInput(&controller->buttons, ovrButton_Y, "Y", 0, qfalse);
}
}
void IN_VRInputFrame( void )
@@ -945,27 +1311,17 @@ void IN_VRInputFrame( void )
memset(&rightController, 0, sizeof(rightController));
controllerInit = qtrue;
}
engine_t* engine = VR_GetEngine();
ovrMobile* ovr = VR_GetEngine()->ovr;
if (!ovr) {
return;
}
ovrResult result;
if (vr_extralatencymode != NULL &&
vr_extralatencymode->integer) {
result = vrapi_SetExtraLatencyMode(VR_GetEngine()->ovr, VRAPI_EXTRA_LATENCY_MODE_ON);
assert(result == VRAPI_INITIALIZE_SUCCESS);
//TODO:vrapi_SetExtraLatencyMode(VR_GetEngine()->ovr, VRAPI_EXTRA_LATENCY_MODE_ON);
}
if (vr_refreshrate != NULL && vr_refreshrate->integer)
{
vrapi_SetDisplayRefreshRate(VR_GetEngine()->ovr, (float)vr_refreshrate->integer);
if (vr_refreshrate != NULL && vr_refreshrate->integer) {
OXR(engine->appState.pfnRequestDisplayRefreshRate(engine->appState.Session, (float)vr_refreshrate->integer));
}
result = vrapi_SetClockLevels(VR_GetEngine()->ovr, 4, 4);
assert(result == VRAPI_INITIALIZE_SUCCESS);
vr.virtual_screen = VR_useScreenLayer();
VR_processHaptics();
@@ -976,10 +1332,14 @@ void IN_VRInputFrame( void )
{
// We extract Yaw, Pitch, Roll instead of directly using the orientation
// to allow "additional" yaw manipulation with mouse/controller.
const ovrQuatf quatHmd = VR_GetEngine()->tracking.HeadPose.Pose.Orientation;
const ovrVector3f positionHmd = VR_GetEngine()->tracking.HeadPose.Pose.Position;
XrSpaceLocation loc = {};
loc.type = XR_TYPE_SPACE_LOCATION;
OXR(xrLocateSpace(VR_GetEngine()->appState.HeadSpace, VR_GetEngine()->appState.CurrentSpace, VR_GetEngine()->predictedDisplayTime, &loc));
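// predictedDisplayTime is presumably cached from xrWaitFrame by the render loop;
// locating HeadSpace within CurrentSpace at that time gives the predicted head pose.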
XrPosef xfStageFromHead = loc.pose;
const XrQuaternionf quatHmd = xfStageFromHead.orientation;
const XrVector3f positionHmd = xfStageFromHead.position;
vec3_t rotation = {0, 0, 0};
QuatToYawPitchRoll(quatHmd, rotation, vr.hmdorientation);
VectorSet(vr.hmdposition, positionHmd.x, positionHmd.y + vr_heightAdjust->value, positionHmd.z);
//Position
@@ -1000,60 +1360,105 @@
vr.clientview_yaw_last = clientview_yaw;
}
ovrInputCapabilityHeader capsHeader;
uint32_t index = 0;
for (;;) {
ovrResult enumResult = vrapi_EnumerateInputDevices(ovr, index, &capsHeader);
if (enumResult < 0) {
break;
}
++index;
if (capsHeader.Type != ovrControllerType_TrackedRemote) {
continue;
}
if (leftControllerAimSpace == XR_NULL_HANDLE) {
leftControllerAimSpace = CreateActionSpace(aimPoseAction, leftHandPath);
}
if (rightControllerAimSpace == XR_NULL_HANDLE) {
rightControllerAimSpace = CreateActionSpace(aimPoseAction, rightHandPath);
}
if (leftControllerGripSpace == XR_NULL_HANDLE) {
leftControllerGripSpace = CreateActionSpace(gripPoseAction, leftHandPath);
}
if (rightControllerGripSpace == XR_NULL_HANDLE) {
rightControllerGripSpace = CreateActionSpace(gripPoseAction, rightHandPath);
}
ovrInputTrackedRemoteCapabilities caps;
caps.Header = capsHeader;
ovrResult capsResult = vrapi_GetInputDeviceCapabilities(ovr, &caps.Header);
if (capsResult < 0) {
continue;
}
// update input information
XrAction controller[] = {aimPoseAction, gripPoseAction, aimPoseAction, gripPoseAction};
XrPath subactionPath[] = {leftHandPath, leftHandPath, rightHandPath, rightHandPath};
XrSpace controllerSpace[] = {
leftControllerAimSpace,
leftControllerGripSpace,
rightControllerAimSpace,
rightControllerGripSpace,
};
for (int i = 0; i < 4; i++) {
if (ActionPoseIsActive(controller[i], subactionPath[i])) {
LocVel lv = GetSpaceLocVel(controllerSpace[i], VR_GetEngine()->predictedDisplayTime);
VR_GetEngine()->appState.TrackedController[i].Active = (lv.loc.locationFlags & XR_SPACE_LOCATION_POSITION_VALID_BIT) != 0;
VR_GetEngine()->appState.TrackedController[i].Pose = lv.loc.pose;
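// Nudge the reported position along the measured linear velocity to compensate
// a little input latency.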
for (int j = 0; j < 3; j++) {
float dt = 0.01f; // use 0.2f for testing velocity vectors
(&VR_GetEngine()->appState.TrackedController[i].Pose.position.x)[j] += (&lv.vel.linearVelocity.x)[j] * dt;
}
} else {
ovrTrackedController_Clear(&VR_GetEngine()->appState.TrackedController[i]);
}
}
ovrInputStateTrackedRemote state;
state.Header.ControllerType = ovrControllerType_TrackedRemote;
ovrResult stateResult = vrapi_GetCurrentInputState(ovr, capsHeader.DeviceID, &state.Header);
if (stateResult < 0) {
continue;
}
// OpenXR input
{
// Attach to session
XrSessionActionSetsAttachInfo attachInfo = {};
attachInfo.type = XR_TYPE_SESSION_ACTION_SETS_ATTACH_INFO;
attachInfo.next = NULL;
attachInfo.countActionSets = 1;
attachInfo.actionSets = &runningActionSet;
OXR(xrAttachSessionActionSets(engine->appState.Session, &attachInfo));
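// Note: xrAttachSessionActionSets succeeds at most once per session; later
// calls return XR_ERROR_ACTIONSETS_ALREADY_ATTACHED.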
ovrTracking remoteTracking;
stateResult = vrapi_GetInputTrackingState(ovr, capsHeader.DeviceID, VR_GetEngine()->predictedDisplayTime,
&remoteTracking);
if (stateResult < 0) {
continue;
}
// sync action data
XrActiveActionSet activeActionSet = {};
activeActionSet.actionSet = runningActionSet;
activeActionSet.subactionPath = XR_NULL_PATH;
qboolean isRight;
vrController_t* controller;
if (caps.ControllerCapabilities & ovrControllerCaps_LeftHand) {
isRight = qfalse;
controller = &leftController;
controllerIDs[0] = capsHeader.DeviceID;
} else if (caps.ControllerCapabilities & ovrControllerCaps_RightHand) {
isRight = qtrue;
controller = &rightController;
controllerIDs[1] = capsHeader.DeviceID;
}
else {
continue;
}
XrActionsSyncInfo syncInfo = {};
syncInfo.type = XR_TYPE_ACTIONS_SYNC_INFO;
syncInfo.next = NULL;
syncInfo.countActiveActionSets = 1;
syncInfo.activeActionSets = &activeActionSet;
OXR(xrSyncActions(engine->appState.Session, &syncInfo));
IN_VRButtons(isRight, state.Buttons);
IN_VRController(isRight, remoteTracking);
IN_VRJoystick(isRight, state.Joystick.x, state.Joystick.y);
IN_VRTriggers(isRight, state.IndexTrigger);
}
// query input action states
XrActionStateGetInfo getInfo = {};
getInfo.type = XR_TYPE_ACTION_STATE_GET_INFO;
getInfo.next = NULL;
getInfo.subactionPath = XR_NULL_PATH;
}
//button mapping
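// Fold OpenXR action states back into the legacy ovrButton_* bitmask so the
// existing engine-side button handling stays unchanged.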
uint32_t lButtons = 0;
if (GetActionStateBoolean(menuAction).currentState) lButtons |= ovrButton_Enter;
if (GetActionStateBoolean(buttonXAction).currentState) lButtons |= ovrButton_X;
if (GetActionStateBoolean(buttonYAction).currentState) lButtons |= ovrButton_Y;
if (GetActionStateFloat(gripLeftAction).currentState > 0.5f) lButtons |= ovrButton_GripTrigger;
if (GetActionStateBoolean(thumbstickLeftClickAction).currentState) lButtons |= ovrButton_LThumb;
IN_VRButtons(qfalse, lButtons);
uint32_t rButtons = 0;
if (GetActionStateBoolean(buttonAAction).currentState) rButtons |= ovrButton_A;
if (GetActionStateBoolean(buttonBAction).currentState) rButtons |= ovrButton_B;
if (GetActionStateFloat(gripRightAction).currentState > 0.5f) rButtons |= ovrButton_GripTrigger;
if (GetActionStateBoolean(thumbstickRightClickAction).currentState) rButtons |= ovrButton_RThumb;
IN_VRButtons(qtrue, rButtons);
//index finger click
XrActionStateBoolean indexState;
indexState = GetActionStateBoolean(indexLeftAction);
IN_VRTriggers(qfalse, indexState.currentState ? 1 : 0);
indexState = GetActionStateBoolean(indexRightAction);
IN_VRTriggers(qtrue, indexState.currentState ? 1 : 0);
//thumbstick
XrActionStateVector2f moveJoystickState;
moveJoystickState = GetActionStateVector2(moveOnLeftJoystickAction);
IN_VRJoystick(qfalse, moveJoystickState.currentState.x, moveJoystickState.currentState.y);
moveJoystickState = GetActionStateVector2(moveOnRightJoystickAction);
IN_VRJoystick(qtrue, moveJoystickState.currentState.x, moveJoystickState.currentState.y);
//controller pose
if (engine->appState.TrackedController[0].Active)
IN_VRController(qfalse, engine->appState.TrackedController[0].Pose);
if (engine->appState.TrackedController[2].Active)
IN_VRController(qtrue, engine->appState.TrackedController[2].Pose);
lastframetime = in_vrEventTime;
in_vrEventTime = Sys_Milliseconds( );

View File

@@ -4,22 +4,10 @@
#include "../qcommon/q_shared.h"
#include "../qcommon/qcommon.h"
#include "../client/client.h"
#include "../VrApi/Include/VrApi_Types.h"
#include "vr_clientinfo.h"
#include "vr_types.h"
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES3/gl3.h>
#include <GLES3/gl3ext.h>
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wstrict-prototypes"
#include <VrApi.h>
#include <VrApi_Helpers.h>
#pragma clang diagnostic pop
#include <assert.h>
#include <stdlib.h>
#include <string.h>
@@ -30,10 +18,28 @@
#include <GLES3/gl32.h>
#endif
#define SUPER_SAMPLE 1.15f
extern vr_clientinfo_t vr;
XrView* projections;
void VR_UpdateStageBounds(ovrApp* pappState) {
XrExtent2Df stageBounds = {};
XrResult result;
OXR(result = xrGetReferenceSpaceBoundsRect(
pappState->Session, XR_REFERENCE_SPACE_TYPE_STAGE, &stageBounds));
if (result != XR_SUCCESS) {
ALOGV("Stage bounds query failed: using small defaults");
stageBounds.width = 1.0f;
stageBounds.height = 1.0f;
pappState->CurrentSpace = pappState->FakeStageSpace;
}
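// On success only the queried bounds are logged; CurrentSpace keeps whatever
// VR_InitRenderer selected.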
ALOGV("Stage bounds: width = %f, depth %f", stageBounds.width, stageBounds.height);
}
void APIENTRY VR_GLDebugLog(GLenum source, GLenum type, GLuint id,
GLenum severity, GLsizei length, const GLchar* message, const void* userParam)
{
@@ -71,11 +77,79 @@ void VR_GetResolution(engine_t* engine, int *pWidth, int *pHeight)
if (engine)
{
*pWidth = width = vrapi_GetSystemPropertyInt(&engine->java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_WIDTH) * SUPER_SAMPLE;
*pHeight = height = vrapi_GetSystemPropertyInt(&engine->java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_HEIGHT) * SUPER_SAMPLE;
// Enumerate the viewport configurations.
uint32_t viewportConfigTypeCount = 0;
OXR(xrEnumerateViewConfigurations(
engine->appState.Instance, engine->appState.SystemId, 0, &viewportConfigTypeCount, NULL));
vr.fov_x = vrapi_GetSystemPropertyInt( &engine->java, VRAPI_SYS_PROP_SUGGESTED_EYE_FOV_DEGREES_X);
vr.fov_y = vrapi_GetSystemPropertyInt( &engine->java, VRAPI_SYS_PROP_SUGGESTED_EYE_FOV_DEGREES_Y);
XrViewConfigurationType* viewportConfigurationTypes =
(XrViewConfigurationType*)malloc(viewportConfigTypeCount * sizeof(XrViewConfigurationType));
OXR(xrEnumerateViewConfigurations(
engine->appState.Instance,
engine->appState.SystemId,
viewportConfigTypeCount,
&viewportConfigTypeCount,
viewportConfigurationTypes));
ALOGV("Available Viewport Configuration Types: %d", viewportConfigTypeCount);
for (uint32_t i = 0; i < viewportConfigTypeCount; i++) {
const XrViewConfigurationType viewportConfigType = viewportConfigurationTypes[i];
ALOGV(
"Viewport configuration type %d : %s",
viewportConfigType,
viewportConfigType == XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO ? "Selected" : "");
XrViewConfigurationProperties viewportConfig;
viewportConfig.type = XR_TYPE_VIEW_CONFIGURATION_PROPERTIES;
OXR(xrGetViewConfigurationProperties(
engine->appState.Instance, engine->appState.SystemId, viewportConfigType, &viewportConfig));
ALOGV(
"FovMutable=%s ConfigurationType %d",
viewportConfig.fovMutable ? "true" : "false",
viewportConfig.viewConfigurationType);
uint32_t viewCount;
OXR(xrEnumerateViewConfigurationViews(
engine->appState.Instance, engine->appState.SystemId, viewportConfigType, 0, &viewCount, NULL));
if (viewCount > 0) {
XrViewConfigurationView* elements =
(XrViewConfigurationView*)malloc(viewCount * sizeof(XrViewConfigurationView));
for (uint32_t e = 0; e < viewCount; e++) {
elements[e].type = XR_TYPE_VIEW_CONFIGURATION_VIEW;
elements[e].next = NULL;
}
OXR(xrEnumerateViewConfigurationViews(
engine->appState.Instance,
engine->appState.SystemId,
viewportConfigType,
viewCount,
&viewCount,
elements));
// Cache the view config properties for the selected config type.
if (viewportConfigType == XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO) {
assert(viewCount == ovrMaxNumEyes);
for (uint32_t e = 0; e < viewCount; e++) {
engine->appState.ViewConfigurationView[e] = elements[e];
}
}
free(elements);
} else {
ALOGE("Empty viewport configuration type: %d", viewCount);
}
}
free(viewportConfigurationTypes);
*pWidth = width = engine->appState.ViewConfigurationView[0].recommendedImageRectWidth;
*pHeight = height = engine->appState.ViewConfigurationView[0].recommendedImageRectHeight;
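// The runtime's recommended eye-buffer size replaces the old vrapi
// suggested-resolution query above.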
}
else
{
@@ -85,313 +159,348 @@ void VR_GetResolution(engine_t* engine, int *pWidth, int *pHeight)
}
}
typedef void(GL_APIENTRY* PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC)(
GLenum target,
GLenum attachment,
GLuint texture,
GLint level,
GLint baseViewIndex,
GLsizei numViews);
void VR_InitRenderer( engine_t* engine ) {
#if ENABLE_GL_DEBUG
glEnable(GL_DEBUG_OUTPUT);
glDebugMessageCallback(VR_GLDebugLog, 0);
#endif
PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC glFramebufferTextureMultiviewOVR =
(PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC)eglGetProcAddress(
"glFramebufferTextureMultiviewOVR");
int eyeW, eyeH;
VR_GetResolution(engine, &eyeW, &eyeH);
//for (int eye = 0; eye < VRAPI_FRAME_LAYER_EYE_MAX; ++eye)
{
framebuffer_t* framebuffer = &engine->framebuffers;
framebuffer->colorTexture = vrapi_CreateTextureSwapChain3(VRAPI_TEXTURE_TYPE_2D_ARRAY, GL_RGBA8,
eyeW, eyeH, 1, 3);
framebuffer->swapchainLength = vrapi_GetTextureSwapChainLength(framebuffer->colorTexture);
framebuffer->depthBuffers = (GLuint*)malloc(framebuffer->swapchainLength * sizeof(GLuint));
framebuffer->framebuffers = (GLuint*)malloc(framebuffer->swapchainLength * sizeof(GLuint));
for (int index = 0; index < framebuffer->swapchainLength; ++index) {
GLuint colorTexture;
GLenum framebufferStatus;
// Get the viewport configuration info for the chosen viewport configuration type.
engine->appState.ViewportConfig.type = XR_TYPE_VIEW_CONFIGURATION_PROPERTIES;
colorTexture = vrapi_GetTextureSwapChainHandle(framebuffer->colorTexture, index);
glBindTexture(GL_TEXTURE_2D_ARRAY, colorTexture);
GLfloat borderColor[] = {0.0f, 0.0f, 0.0f, 0.0f};
glTexParameterfv(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_BORDER_COLOR, borderColor);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D_ARRAY, 0);
OXR(xrGetViewConfigurationProperties(
engine->appState.Instance, engine->appState.SystemId, XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO, &engine->appState.ViewportConfig));
glGenTextures(1, &framebuffer->depthBuffers[index]);
glBindTexture(GL_TEXTURE_2D_ARRAY, framebuffer->depthBuffers[index]);
glTexStorage3D(GL_TEXTURE_2D_ARRAY, 1, GL_DEPTH_COMPONENT24, eyeW, eyeH, 2);
glBindTexture(GL_TEXTURE_2D_ARRAY, 0);
// Get the supported display refresh rates for the system.
{
PFN_xrEnumerateDisplayRefreshRatesFB pfnxrEnumerateDisplayRefreshRatesFB = NULL;
OXR(xrGetInstanceProcAddr(
engine->appState.Instance,
"xrEnumerateDisplayRefreshRatesFB",
(PFN_xrVoidFunction*)(&pfnxrEnumerateDisplayRefreshRatesFB)));
glGenFramebuffers(1, &framebuffer->framebuffers[index]);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, framebuffer->framebuffers[index]);
OXR(pfnxrEnumerateDisplayRefreshRatesFB(
engine->appState.Session, 0, &engine->appState.NumSupportedDisplayRefreshRates, NULL));
glFramebufferTextureMultiviewOVR(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT,
framebuffer->depthBuffers[index], 0, 0, 2);
glFramebufferTextureMultiviewOVR(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
colorTexture, 0, 0, 2);
engine->appState.SupportedDisplayRefreshRates =
(float*)malloc(engine->appState.NumSupportedDisplayRefreshRates * sizeof(float));
OXR(pfnxrEnumerateDisplayRefreshRatesFB(
engine->appState.Session,
engine->appState.NumSupportedDisplayRefreshRates,
&engine->appState.NumSupportedDisplayRefreshRates,
engine->appState.SupportedDisplayRefreshRates));
ALOGV("Supported Refresh Rates:");
for (uint32_t i = 0; i < engine->appState.NumSupportedDisplayRefreshRates; i++) {
ALOGV("%d:%f", i, engine->appState.SupportedDisplayRefreshRates[i]);
}
framebufferStatus = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER);
assert(framebufferStatus == GL_FRAMEBUFFER_COMPLETE);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
}
OXR(xrGetInstanceProcAddr(
engine->appState.Instance,
"xrGetDisplayRefreshRateFB",
(PFN_xrVoidFunction*)(&engine->appState.pfnGetDisplayRefreshRate)));
float currentDisplayRefreshRate = 0.0f;
OXR(engine->appState.pfnGetDisplayRefreshRate(engine->appState.Session, &currentDisplayRefreshRate));
ALOGV("Current System Display Refresh Rate: %f", currentDisplayRefreshRate);
OXR(xrGetInstanceProcAddr(
engine->appState.Instance,
"xrRequestDisplayRefreshRateFB",
(PFN_xrVoidFunction*)(&engine->appState.pfnRequestDisplayRefreshRate)));
// Test requesting the system default.
OXR(engine->appState.pfnRequestDisplayRefreshRate(engine->appState.Session, 0.0f));
ALOGV("Requesting system default display refresh rate");
}
uint32_t numOutputSpaces = 0;
OXR(xrEnumerateReferenceSpaces(engine->appState.Session, 0, &numOutputSpaces, NULL));
XrReferenceSpaceType* referenceSpaces =
(XrReferenceSpaceType*)malloc(numOutputSpaces * sizeof(XrReferenceSpaceType));
OXR(xrEnumerateReferenceSpaces(
engine->appState.Session, numOutputSpaces, &numOutputSpaces, referenceSpaces));
GLboolean stageSupported = GL_FALSE;
for (uint32_t i = 0; i < numOutputSpaces; i++) {
if (referenceSpaces[i] == XR_REFERENCE_SPACE_TYPE_STAGE) {
stageSupported = GL_TRUE;
break;
}
}
free(referenceSpaces);
// Create a space to the first path
XrReferenceSpaceCreateInfo spaceCreateInfo = {};
spaceCreateInfo.type = XR_TYPE_REFERENCE_SPACE_CREATE_INFO;
spaceCreateInfo.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_VIEW;
spaceCreateInfo.poseInReferenceSpace.orientation.w = 1.0f;
OXR(xrCreateReferenceSpace(engine->appState.Session, &spaceCreateInfo, &engine->appState.HeadSpace));
spaceCreateInfo.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_LOCAL;
OXR(xrCreateReferenceSpace(engine->appState.Session, &spaceCreateInfo, &engine->appState.LocalSpace));
// Create a default stage space to use if SPACE_TYPE_STAGE is not
// supported, or calls to xrGetReferenceSpaceBoundsRect fail.
{
spaceCreateInfo.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_LOCAL;
spaceCreateInfo.poseInReferenceSpace.position.y = -1.6750f;
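// Offset LOCAL space down by 1.675 m (presumably an average standing eye height)
// so the fake stage origin lands at floor level.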
OXR(xrCreateReferenceSpace(engine->appState.Session, &spaceCreateInfo, &engine->appState.FakeStageSpace));
ALOGV("Created fake stage space from local space with offset");
engine->appState.CurrentSpace = engine->appState.FakeStageSpace;
}
if (stageSupported) {
spaceCreateInfo.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_STAGE;
spaceCreateInfo.poseInReferenceSpace.position.y = 0.0f;
OXR(xrCreateReferenceSpace(engine->appState.Session, &spaceCreateInfo, &engine->appState.StageSpace));
ALOGV("Created stage space");
engine->appState.CurrentSpace = engine->appState.StageSpace;
}
projections = (XrView*)(malloc(ovrMaxNumEyes * sizeof(XrView)));
ovrRenderer_Create(
engine->appState.Session,
&engine->appState.Renderer,
engine->appState.ViewConfigurationView[0].recommendedImageRectWidth,
engine->appState.ViewConfigurationView[0].recommendedImageRectHeight);
}
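// Hedged sketch, not part of this commit: the xrGetReferenceSpaceBoundsRect
// call mentioned in the fallback comment above could drive the choice between
// StageSpace and FakeStageSpace (VR_UpdateStageBounds remains authoritative).
static void VR_QueryStageBoundsExample( ovrApp* app ) {
XrExtent2Df bounds = {0};
XrResult res = xrGetReferenceSpaceBoundsRect(app->Session, XR_REFERENCE_SPACE_TYPE_STAGE, &bounds);
if (res != XR_SUCCESS) {
app->CurrentSpace = app->FakeStageSpace; // fall back to the offset local space
return;
}
ALOGV("Stage bounds: %f x %f meters", bounds.width, bounds.height);
}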
void VR_DestroyRenderer( engine_t* engine )
{
ovrRenderer_Destroy(&engine->appState.Renderer);
free(projections);
}
void VR_ReInitRenderer()
{
VR_DestroyRenderer( VR_GetEngine() );
VR_InitRenderer( VR_GetEngine() );
}
void VR_ClearFrameBuffer( int width, int height)
{
glEnable( GL_SCISSOR_TEST );
glViewport( 0, 0, width, height );
if (Cvar_VariableIntegerValue("vr_thirdPersonSpectator"))
{
//Blood red.. ish
glClearColor( 0.12f, 0.0f, 0.05f, 1.0f );
}
else
{
//Black
glClearColor( 0.0f, 0.0f, 0.0f, 1.0f );
}
glScissor( 0, 0, width, height );
glClear( GL_COLOR_BUFFER_BIT );
glScissor( 0, 0, 0, 0 );
glDisable( GL_SCISSOR_TEST );
}
void VR_DrawFrame( engine_t* engine ) {
if (vr.weapon_zoomed) {
vr.weapon_zoomLevel += 0.05;
if (vr.weapon_zoomLevel > 2.5f)
vr.weapon_zoomLevel = 2.5f;
}
else {
//Zoom back out quicker
vr.weapon_zoomLevel -= 0.25f;
if (vr.weapon_zoomLevel < 1.0f)
vr.weapon_zoomLevel = 1.0f;
}
GLboolean stageBoundsDirty = GL_TRUE;
ovrApp_HandleXrEvents(&engine->appState);
if (engine->appState.SessionActive == GL_FALSE) {
return;
}
if (stageBoundsDirty) {
VR_UpdateStageBounds(&engine->appState);
stageBoundsDirty = GL_FALSE;
}
// NOTE: OpenXR does not use the concept of frame indices. Instead,
// XrWaitFrame returns the predicted display time.
XrFrameWaitInfo waitFrameInfo = {};
waitFrameInfo.type = XR_TYPE_FRAME_WAIT_INFO;
waitFrameInfo.next = NULL;
XrFrameState frameState = {};
frameState.type = XR_TYPE_FRAME_STATE;
frameState.next = NULL;
OXR(xrWaitFrame(engine->appState.Session, &waitFrameInfo, &frameState));
engine->predictedDisplayTime = frameState.predictedDisplayTime;
if (!frameState.shouldRender) {
return;
}
// Get the HMD pose, predicted for the middle of the time period during which
// the new eye images will be displayed. The number of frames predicted ahead
// depends on the pipeline depth of the engine and the synthesis rate.
// The better the prediction, the less black will be pulled in at the edges.
XrFrameBeginInfo beginFrameDesc = {};
beginFrameDesc.type = XR_TYPE_FRAME_BEGIN_INFO;
beginFrameDesc.next = NULL;
OXR(xrBeginFrame(engine->appState.Session, &beginFrameDesc));
XrSpaceLocation loc = {};
loc.type = XR_TYPE_SPACE_LOCATION;
OXR(xrLocateSpace(
engine->appState.HeadSpace, engine->appState.CurrentSpace, frameState.predictedDisplayTime, &loc));
XrPosef xfStageFromHead = loc.pose;
OXR(xrLocateSpace(
engine->appState.HeadSpace, engine->appState.LocalSpace, frameState.predictedDisplayTime, &loc));
XrViewLocateInfo projectionInfo = {};
projectionInfo.type = XR_TYPE_VIEW_LOCATE_INFO;
projectionInfo.viewConfigurationType = engine->appState.ViewportConfig.viewConfigurationType;
projectionInfo.displayTime = frameState.predictedDisplayTime;
projectionInfo.space = engine->appState.HeadSpace;
XrViewState viewState = {XR_TYPE_VIEW_STATE, NULL};
uint32_t projectionCapacityInput = ovrMaxNumEyes;
uint32_t projectionCountOutput = projectionCapacityInput;
OXR(xrLocateViews(
engine->appState.Session,
&projectionInfo,
&viewState,
projectionCapacityInput,
&projectionCountOutput,
projections));
XrFovf fov;
XrPosef viewTransform[2];
for (int eye = 0; eye < ovrMaxNumEyes; eye++) {
XrPosef xfHeadFromEye = projections[eye].pose;
XrPosef xfStageFromEye = XrPosef_Multiply(xfStageFromHead, xfHeadFromEye);
viewTransform[eye] = XrPosef_Inverse(xfStageFromEye);
fov = projections[eye].fov;
}
vr.fov_x = (fabs(fov.angleLeft) + fabs(fov.angleRight)) * 180.0f / M_PI;
vr.fov_y = (fabs(fov.angleUp) + fabs(fov.angleDown)) * 180.0f / M_PI;
//Projection used for drawing HUD models etc
float hudScale = M_PI * 15.0f / 180.0f;
const ovrMatrix4f monoVRMatrix = ovrMatrix4f_CreateProjectionFov(
-hudScale, hudScale, hudScale, -hudScale, 1.0f, 0.0f );
const ovrMatrix4f projectionMatrix = ovrMatrix4f_CreateProjectionFov(
fov.angleLeft / vr.weapon_zoomLevel,
fov.angleRight / vr.weapon_zoomLevel,
fov.angleUp / vr.weapon_zoomLevel,
fov.angleDown / vr.weapon_zoomLevel,
1.0f, 0.0f );
engine->appState.LayerCount = 0;
memset(engine->appState.Layers, 0, sizeof(ovrCompositorLayer_Union) * ovrMaxLayerCount);
ovrFramebuffer* frameBuffer = &engine->appState.Renderer.FrameBuffer;
int swapchainIndex = engine->appState.Renderer.FrameBuffer.TextureSwapChainIndex;
int glFramebuffer = engine->appState.Renderer.FrameBuffer.FrameBuffers[swapchainIndex];
re.SetVRHeadsetParms(projectionMatrix.M, monoVRMatrix.M, glFramebuffer);
ovrFramebuffer_Acquire(frameBuffer);
ovrFramebuffer_SetCurrent(frameBuffer);
VR_ClearFrameBuffer(frameBuffer->ColorSwapChain.Width, frameBuffer->ColorSwapChain.Height);
Com_Frame();
// Clear the alpha channel, other way OpenXR would not transfer the framebuffer fully
glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_TRUE);
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
ovrFramebuffer_Resolve(frameBuffer);
ovrFramebuffer_Release(frameBuffer);
ovrFramebuffer_SetNone();
XrCompositionLayerProjectionView projection_layer_elements[2] = {};
if (!VR_useScreenLayer() && !(cl.snap.ps.pm_flags & PMF_FOLLOW && vr.follow_mode == VRFM_FIRSTPERSON)) {
vr.menuYaw = vr.hmdorientation[YAW];
for (int eye = 0; eye < ovrMaxNumEyes; eye++) {
memset(&projection_layer_elements[eye], 0, sizeof(XrCompositionLayerProjectionView));
projection_layer_elements[eye].type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW;
projection_layer_elements[eye].pose = XrPosef_Inverse(viewTransform[eye]);
projection_layer_elements[eye].fov = fov;
memset(&projection_layer_elements[eye].subImage, 0, sizeof(XrSwapchainSubImage));
projection_layer_elements[eye].subImage.swapchain = frameBuffer->ColorSwapChain.Handle;
projection_layer_elements[eye].subImage.imageRect.offset.x = 0;
projection_layer_elements[eye].subImage.imageRect.offset.y = 0;
projection_layer_elements[eye].subImage.imageRect.extent.width = frameBuffer->ColorSwapChain.Width;
projection_layer_elements[eye].subImage.imageRect.extent.height = frameBuffer->ColorSwapChain.Height;
projection_layer_elements[eye].subImage.imageArrayIndex = eye;
}
XrCompositionLayerProjection projection_layer = {};
projection_layer.type = XR_TYPE_COMPOSITION_LAYER_PROJECTION;
projection_layer.layerFlags = XR_COMPOSITION_LAYER_BLEND_TEXTURE_SOURCE_ALPHA_BIT;
projection_layer.layerFlags |= XR_COMPOSITION_LAYER_CORRECT_CHROMATIC_ABERRATION_BIT;
projection_layer.space = engine->appState.CurrentSpace;
projection_layer.viewCount = ovrMaxNumEyes;
projection_layer.views = projection_layer_elements;
engine->appState.Layers[engine->appState.LayerCount++].Projection = projection_layer;
} else {
// Build the cylinder layer
int width = engine->appState.Renderer.FrameBuffer.ColorSwapChain.Width;
int height = engine->appState.Renderer.FrameBuffer.ColorSwapChain.Height;
XrCompositionLayerCylinderKHR cylinder_layer = {};
cylinder_layer.type = XR_TYPE_COMPOSITION_LAYER_CYLINDER_KHR;
cylinder_layer.layerFlags = XR_COMPOSITION_LAYER_BLEND_TEXTURE_SOURCE_ALPHA_BIT;
cylinder_layer.space = engine->appState.LocalSpace;
cylinder_layer.eyeVisibility = XR_EYE_VISIBILITY_BOTH;
memset(&cylinder_layer.subImage, 0, sizeof(XrSwapchainSubImage));
cylinder_layer.subImage.swapchain = engine->appState.Renderer.FrameBuffer.ColorSwapChain.Handle;
cylinder_layer.subImage.imageRect.offset.x = 0;
cylinder_layer.subImage.imageRect.offset.y = 0;
cylinder_layer.subImage.imageRect.extent.width = width;
cylinder_layer.subImage.imageRect.extent.height = height;
cylinder_layer.subImage.imageArrayIndex = 0;
const XrVector3f axis = {0.0f, 1.0f, 0.0f};
const XrVector3f pos = {xfStageFromHead.position.x, -0.25f, xfStageFromHead.position.z - 4.0f};
cylinder_layer.pose.orientation = XrQuaternionf_CreateFromVectorAngle(axis, 0);
cylinder_layer.pose.position = pos;
cylinder_layer.radius = 12.0f;
cylinder_layer.centralAngle = MATH_PI * 0.5f;
cylinder_layer.aspectRatio = width / (float)height / 0.75f;
engine->appState.Layers[engine->appState.LayerCount++].Cylinder = cylinder_layer;
}
// Compose the layers for this frame.
const XrCompositionLayerBaseHeader* layers[ovrMaxLayerCount] = {};
for (int i = 0; i < engine->appState.LayerCount; i++) {
layers[i] = (const XrCompositionLayerBaseHeader*)&engine->appState.Layers[i];
}
XrFrameEndInfo endFrameInfo = {};
endFrameInfo.type = XR_TYPE_FRAME_END_INFO;
endFrameInfo.displayTime = frameState.predictedDisplayTime;
endFrameInfo.environmentBlendMode = XR_ENVIRONMENT_BLEND_MODE_OPAQUE;
endFrameInfo.layerCount = engine->appState.LayerCount;
endFrameInfo.layers = layers;
OXR(xrEndFrame(engine->appState.Session, &endFrameInfo));
frameBuffer->TextureSwapChainIndex++;
frameBuffer->TextureSwapChainIndex %= frameBuffer->TextureSwapChainLength;
}
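// Condensed sketch of the frame cadence implemented above, under the same
// assumptions (active session, projections[] allocated); illustrative only,
// not a replacement for VR_DrawFrame.
static void VR_FrameSkeletonExample( ovrApp* app ) {
XrFrameWaitInfo waitInfo = { XR_TYPE_FRAME_WAIT_INFO };
XrFrameState state = { XR_TYPE_FRAME_STATE };
OXR(xrWaitFrame(app->Session, &waitInfo, &state)); // throttles and predicts the display time
XrFrameBeginInfo beginInfo = { XR_TYPE_FRAME_BEGIN_INFO };
OXR(xrBeginFrame(app->Session, &beginInfo)); // marks the start of GPU work for this frame
// ... locate views for state.predictedDisplayTime and render here ...
XrFrameEndInfo endInfo = { XR_TYPE_FRAME_END_INFO };
endInfo.displayTime = state.predictedDisplayTime;
endInfo.environmentBlendMode = XR_ENVIRONMENT_BLEND_MODE_OPAQUE;
endInfo.layerCount = 0; // no layers submitted: the runtime composits a blank frame
OXR(xrEndFrame(app->Session, &endInfo));
}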

View File

@ -9,6 +9,7 @@ void VR_GetResolution( engine_t* engine, int *pWidth, int *pHeight );
void VR_InitRenderer( engine_t* engine );
void VR_DestroyRenderer( engine_t* engine );
void VR_DrawFrame( engine_t* engine );
void VR_ReInitRenderer();
#endif

View File

@ -0,0 +1,591 @@
#include "vr_types.h"
/************************************************************************************
Original file name : XrCompositor_NativeActivity.c
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
*************************************************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <string.h>
#include <math.h>
#include <time.h>
#include <pthread.h>
#include <sys/prctl.h>
#include <assert.h>
typedef void(GL_APIENTRY* PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC)(
GLenum target,
GLenum attachment,
GLuint texture,
GLint level,
GLint baseViewIndex,
GLsizei numViews);
/*
================================================================================
ovrFramebuffer
================================================================================
*/
void ovrFramebuffer_Clear(ovrFramebuffer* frameBuffer) {
frameBuffer->Width = 0;
frameBuffer->Height = 0;
frameBuffer->TextureSwapChainLength = 0;
frameBuffer->TextureSwapChainIndex = 0;
frameBuffer->ColorSwapChain.Handle = XR_NULL_HANDLE;
frameBuffer->ColorSwapChain.Width = 0;
frameBuffer->ColorSwapChain.Height = 0;
frameBuffer->ColorSwapChainImage = NULL;
frameBuffer->DepthBuffers = NULL;
frameBuffer->FrameBuffers = NULL;
}
bool ovrFramebuffer_Create(
XrSession session,
ovrFramebuffer* frameBuffer,
const int width,
const int height) {
frameBuffer->Width = width;
frameBuffer->Height = height;
PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC glFramebufferTextureMultiviewOVR =
(PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC)eglGetProcAddress(
"glFramebufferTextureMultiviewOVR");
XrSwapchainCreateInfo swapChainCreateInfo;
memset(&swapChainCreateInfo, 0, sizeof(swapChainCreateInfo));
swapChainCreateInfo.type = XR_TYPE_SWAPCHAIN_CREATE_INFO;
swapChainCreateInfo.usageFlags = XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT;
swapChainCreateInfo.format = GL_RGBA8;
swapChainCreateInfo.sampleCount = 1;
swapChainCreateInfo.width = width;
swapChainCreateInfo.height = height;
swapChainCreateInfo.faceCount = 1;
swapChainCreateInfo.arraySize = 2;
swapChainCreateInfo.mipCount = 1;
frameBuffer->ColorSwapChain.Width = swapChainCreateInfo.width;
frameBuffer->ColorSwapChain.Height = swapChainCreateInfo.height;
// Create the swapchain.
OXR(xrCreateSwapchain(session, &swapChainCreateInfo, &frameBuffer->ColorSwapChain.Handle));
// Get the number of swapchain images.
OXR(xrEnumerateSwapchainImages(
frameBuffer->ColorSwapChain.Handle, 0, &frameBuffer->TextureSwapChainLength, NULL));
// Allocate the swapchain images array.
frameBuffer->ColorSwapChainImage = (XrSwapchainImageOpenGLESKHR*)malloc(
frameBuffer->TextureSwapChainLength * sizeof(XrSwapchainImageOpenGLESKHR));
// Populate the swapchain image array.
for (uint32_t i = 0; i < frameBuffer->TextureSwapChainLength; i++) {
frameBuffer->ColorSwapChainImage[i].type = XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_ES_KHR;
frameBuffer->ColorSwapChainImage[i].next = NULL;
}
OXR(xrEnumerateSwapchainImages(
frameBuffer->ColorSwapChain.Handle,
frameBuffer->TextureSwapChainLength,
&frameBuffer->TextureSwapChainLength,
(XrSwapchainImageBaseHeader*)frameBuffer->ColorSwapChainImage));
frameBuffer->DepthBuffers =
(GLuint*)malloc(frameBuffer->TextureSwapChainLength * sizeof(GLuint));
frameBuffer->FrameBuffers =
(GLuint*)malloc(frameBuffer->TextureSwapChainLength * sizeof(GLuint));
for (uint32_t i = 0; i < frameBuffer->TextureSwapChainLength; i++) {
// Create the color buffer texture.
const GLuint colorTexture = frameBuffer->ColorSwapChainImage[i].image;
GLfloat borderColor[] = {0.0f, 0.0f, 0.0f, 0.0f};
GLenum textureTarget = GL_TEXTURE_2D_ARRAY;
GL(glBindTexture(textureTarget, colorTexture));
GL(glTexParameterfv(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_BORDER_COLOR, borderColor));
GL(glTexParameteri(textureTarget, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE));
GL(glTexParameteri(textureTarget, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE));
GL(glTexParameteri(textureTarget, GL_TEXTURE_MIN_FILTER, GL_LINEAR));
GL(glTexParameteri(textureTarget, GL_TEXTURE_MAG_FILTER, GL_LINEAR));
GL(glBindTexture(textureTarget, 0));
// Create depth buffer.
GL(glGenTextures(1, &frameBuffer->DepthBuffers[i]));
GL(glBindTexture(textureTarget, frameBuffer->DepthBuffers[i]));
GL(glTexStorage3D(textureTarget, 1, GL_DEPTH_COMPONENT24, width, height, 2));
GL(glBindTexture(textureTarget, 0));
// Create the frame buffer.
GL(glGenFramebuffers(1, &frameBuffer->FrameBuffers[i]));
GL(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, frameBuffer->FrameBuffers[i]));
GL(glFramebufferTextureMultiviewOVR(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, frameBuffer->DepthBuffers[i], 0, 0, 2));
GL(glFramebufferTextureMultiviewOVR(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, colorTexture, 0, 0, 2));
GL(GLenum renderFramebufferStatus = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER));
GL(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0));
if (renderFramebufferStatus != GL_FRAMEBUFFER_COMPLETE) {
ALOGE("Incomplete frame buffer object: %d", renderFramebufferStatus);
return false;
}
}
return true;
}
void ovrFramebuffer_Destroy(ovrFramebuffer* frameBuffer) {
GL(glDeleteFramebuffers(frameBuffer->TextureSwapChainLength, frameBuffer->FrameBuffers));
GL(glDeleteTextures(frameBuffer->TextureSwapChainLength, frameBuffer->DepthBuffers));
OXR(xrDestroySwapchain(frameBuffer->ColorSwapChain.Handle));
free(frameBuffer->ColorSwapChainImage);
free(frameBuffer->DepthBuffers);
free(frameBuffer->FrameBuffers);
ovrFramebuffer_Clear(frameBuffer);
}
void ovrFramebuffer_SetCurrent(ovrFramebuffer* frameBuffer) {
GL(glBindFramebuffer(
GL_DRAW_FRAMEBUFFER, frameBuffer->FrameBuffers[frameBuffer->TextureSwapChainIndex]));
}
void ovrFramebuffer_SetNone() {
GL(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0));
}
void ovrFramebuffer_Resolve(ovrFramebuffer* frameBuffer) {
// Discard the depth buffer, so the tiler won't need to write it back out to memory.
const GLenum depthAttachment[1] = {GL_DEPTH_ATTACHMENT};
glInvalidateFramebuffer(GL_DRAW_FRAMEBUFFER, 1, depthAttachment);
}
void ovrFramebuffer_Acquire(ovrFramebuffer* frameBuffer) {
// Acquire the swapchain image
XrSwapchainImageAcquireInfo acquireInfo = {XR_TYPE_SWAPCHAIN_IMAGE_ACQUIRE_INFO, NULL};
OXR(xrAcquireSwapchainImage(
frameBuffer->ColorSwapChain.Handle, &acquireInfo, &frameBuffer->TextureSwapChainIndex));
XrSwapchainImageWaitInfo waitInfo;
waitInfo.type = XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO;
waitInfo.next = NULL;
waitInfo.timeout = 1000; /* timeout in nanoseconds */
XrResult res = xrWaitSwapchainImage(frameBuffer->ColorSwapChain.Handle, &waitInfo);
int i = 0;
while (res != XR_SUCCESS) {
res = xrWaitSwapchainImage(frameBuffer->ColorSwapChain.Handle, &waitInfo);
i++;
ALOGV(
" Retry xrWaitSwapchainImage %d times due to XR_TIMEOUT_EXPIRED (duration %f seconds)",
i,
waitInfo.timeout * (1E-9));
}
}
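// Hedged alternative, not in this commit: a single blocking wait instead of
// the retry loop above, using the spec's XR_INFINITE_DURATION timeout.
static void ovrFramebuffer_AcquireBlockingExample(ovrFramebuffer* frameBuffer) {
XrSwapchainImageAcquireInfo acquireInfo = {XR_TYPE_SWAPCHAIN_IMAGE_ACQUIRE_INFO, NULL};
OXR(xrAcquireSwapchainImage(
frameBuffer->ColorSwapChain.Handle, &acquireInfo, &frameBuffer->TextureSwapChainIndex));
XrSwapchainImageWaitInfo waitInfo = {XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO, NULL, XR_INFINITE_DURATION};
OXR(xrWaitSwapchainImage(frameBuffer->ColorSwapChain.Handle, &waitInfo));
}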
void ovrFramebuffer_Release(ovrFramebuffer* frameBuffer) {
XrSwapchainImageReleaseInfo releaseInfo = {XR_TYPE_SWAPCHAIN_IMAGE_RELEASE_INFO, NULL};
OXR(xrReleaseSwapchainImage(frameBuffer->ColorSwapChain.Handle, &releaseInfo));
}
/*
================================================================================
ovrRenderer
================================================================================
*/
void ovrRenderer_Clear(ovrRenderer* renderer) {
ovrFramebuffer_Clear(&renderer->FrameBuffer);
}
void ovrRenderer_Create(
XrSession session,
ovrRenderer* renderer,
int suggestedEyeTextureWidth,
int suggestedEyeTextureHeight) {
// Create the frame buffers.
ovrFramebuffer_Create(
session,
&renderer->FrameBuffer,
suggestedEyeTextureWidth,
suggestedEyeTextureHeight);
}
void ovrRenderer_Destroy(ovrRenderer* renderer) {
ovrFramebuffer_Destroy(&renderer->FrameBuffer);
}
/*
================================================================================
ovrApp
================================================================================
*/
void ovrApp_Clear(ovrApp* app) {
app->Focused = false;
app->Instance = XR_NULL_HANDLE;
app->Session = XR_NULL_HANDLE;
memset(&app->ViewportConfig, 0, sizeof(XrViewConfigurationProperties));
memset(&app->ViewConfigurationView, 0, ovrMaxNumEyes * sizeof(XrViewConfigurationView));
app->SystemId = XR_NULL_SYSTEM_ID;
app->HeadSpace = XR_NULL_HANDLE;
app->LocalSpace = XR_NULL_HANDLE;
app->StageSpace = XR_NULL_HANDLE;
app->FakeStageSpace = XR_NULL_HANDLE;
app->CurrentSpace = XR_NULL_HANDLE;
app->SessionActive = false;
app->SupportedDisplayRefreshRates = NULL;
app->RequestedDisplayRefreshRateIndex = 0;
app->NumSupportedDisplayRefreshRates = 0;
app->pfnGetDisplayRefreshRate = NULL;
app->pfnRequestDisplayRefreshRate = NULL;
app->SwapInterval = 1;
memset(app->Layers, 0, sizeof(ovrCompositorLayer_Union) * ovrMaxLayerCount);
app->LayerCount = 0;
app->MainThreadTid = 0;
app->RenderThreadTid = 0;
app->TouchPadDownLastFrame = false;
ovrRenderer_Clear(&app->Renderer);
}
void ovrApp_Destroy(ovrApp* app) {
if (app->SupportedDisplayRefreshRates != NULL) {
free(app->SupportedDisplayRefreshRates);
}
ovrApp_Clear(app);
}
void ovrApp_HandleSessionStateChanges(ovrApp* app, XrSessionState state) {
if (state == XR_SESSION_STATE_READY) {
assert(app->SessionActive == false);
XrSessionBeginInfo sessionBeginInfo;
memset(&sessionBeginInfo, 0, sizeof(sessionBeginInfo));
sessionBeginInfo.type = XR_TYPE_SESSION_BEGIN_INFO;
sessionBeginInfo.next = NULL;
sessionBeginInfo.primaryViewConfigurationType = app->ViewportConfig.viewConfigurationType;
XrResult result;
OXR(result = xrBeginSession(app->Session, &sessionBeginInfo));
app->SessionActive = (result == XR_SUCCESS);
// Set session state once we have entered VR mode and have a valid session object.
if (app->SessionActive) {
XrPerfSettingsLevelEXT cpuPerfLevel = XR_PERF_SETTINGS_LEVEL_BOOST_EXT;
XrPerfSettingsLevelEXT gpuPerfLevel = XR_PERF_SETTINGS_LEVEL_BOOST_EXT;
PFN_xrPerfSettingsSetPerformanceLevelEXT pfnPerfSettingsSetPerformanceLevelEXT = NULL;
OXR(xrGetInstanceProcAddr(
app->Instance,
"xrPerfSettingsSetPerformanceLevelEXT",
(PFN_xrVoidFunction*)(&pfnPerfSettingsSetPerformanceLevelEXT)));
OXR(pfnPerfSettingsSetPerformanceLevelEXT(
app->Session, XR_PERF_SETTINGS_DOMAIN_CPU_EXT, cpuPerfLevel));
OXR(pfnPerfSettingsSetPerformanceLevelEXT(
app->Session, XR_PERF_SETTINGS_DOMAIN_GPU_EXT, gpuPerfLevel));
PFN_xrSetAndroidApplicationThreadKHR pfnSetAndroidApplicationThreadKHR = NULL;
OXR(xrGetInstanceProcAddr(
app->Instance,
"xrSetAndroidApplicationThreadKHR",
(PFN_xrVoidFunction*)(&pfnSetAndroidApplicationThreadKHR)));
OXR(pfnSetAndroidApplicationThreadKHR(
app->Session, XR_ANDROID_THREAD_TYPE_APPLICATION_MAIN_KHR, app->MainThreadTid));
OXR(pfnSetAndroidApplicationThreadKHR(
app->Session, XR_ANDROID_THREAD_TYPE_RENDERER_MAIN_KHR, app->RenderThreadTid));
}
} else if (state == XR_SESSION_STATE_STOPPING) {
assert(app->SessionActive);
OXR(xrEndSession(app->Session));
app->SessionActive = false;
}
}
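// Design note: XR_PERF_SETTINGS_LEVEL_BOOST_EXT above maximizes clocks at a
// thermal cost. XR_PERF_SETTINGS_LEVEL_SUSTAINED_HIGH_EXT is the spec's
// throttle-friendly alternative, e.g. (illustrative swap only):
//
//	OXR(pfnPerfSettingsSetPerformanceLevelEXT(
//			app->Session, XR_PERF_SETTINGS_DOMAIN_CPU_EXT, XR_PERF_SETTINGS_LEVEL_SUSTAINED_HIGH_EXT));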
void ovrApp_HandleXrEvents(ovrApp* app) {
XrEventDataBuffer eventDataBuffer = {};
// Poll for events
for (;;) {
XrEventDataBaseHeader* baseEventHeader = (XrEventDataBaseHeader*)(&eventDataBuffer);
baseEventHeader->type = XR_TYPE_EVENT_DATA_BUFFER;
baseEventHeader->next = NULL;
XrResult r;
OXR(r = xrPollEvent(app->Instance, &eventDataBuffer));
if (r != XR_SUCCESS) {
break;
}
switch (baseEventHeader->type) {
case XR_TYPE_EVENT_DATA_EVENTS_LOST:
ALOGV("xrPollEvent: received XR_TYPE_EVENT_DATA_EVENTS_LOST event");
break;
case XR_TYPE_EVENT_DATA_INSTANCE_LOSS_PENDING: {
const XrEventDataInstanceLossPending* instance_loss_pending_event =
(XrEventDataInstanceLossPending*)(baseEventHeader);
ALOGV(
"xrPollEvent: received XR_TYPE_EVENT_DATA_INSTANCE_LOSS_PENDING event: time %f",
FromXrTime(instance_loss_pending_event->lossTime));
} break;
case XR_TYPE_EVENT_DATA_INTERACTION_PROFILE_CHANGED:
ALOGV("xrPollEvent: received XR_TYPE_EVENT_DATA_INTERACTION_PROFILE_CHANGED event");
break;
case XR_TYPE_EVENT_DATA_PERF_SETTINGS_EXT: {
const XrEventDataPerfSettingsEXT* perf_settings_event =
(XrEventDataPerfSettingsEXT*)(baseEventHeader);
ALOGV(
"xrPollEvent: received XR_TYPE_EVENT_DATA_PERF_SETTINGS_EXT event: type %d subdomain %d : level %d -> level %d",
perf_settings_event->type,
perf_settings_event->subDomain,
perf_settings_event->fromLevel,
perf_settings_event->toLevel);
} break;
case XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB: {
const XrEventDataDisplayRefreshRateChangedFB* refresh_rate_changed_event =
(XrEventDataDisplayRefreshRateChangedFB*)(baseEventHeader);
ALOGV(
"xrPollEvent: received XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB event: fromRate %f -> toRate %f",
refresh_rate_changed_event->fromDisplayRefreshRate,
refresh_rate_changed_event->toDisplayRefreshRate);
} break;
case XR_TYPE_EVENT_DATA_REFERENCE_SPACE_CHANGE_PENDING: {
XrEventDataReferenceSpaceChangePending* ref_space_change_event =
(XrEventDataReferenceSpaceChangePending*)(baseEventHeader);
ALOGV(
"xrPollEvent: received XR_TYPE_EVENT_DATA_REFERENCE_SPACE_CHANGE_PENDING event: changed space: %d for session %p at time %f",
ref_space_change_event->referenceSpaceType,
(void*)ref_space_change_event->session,
FromXrTime(ref_space_change_event->changeTime));
} break;
case XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED: {
const XrEventDataSessionStateChanged* session_state_changed_event =
(XrEventDataSessionStateChanged*)(baseEventHeader);
ALOGV(
"xrPollEvent: received XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED: %d for session %p at time %f",
session_state_changed_event->state,
(void*)session_state_changed_event->session,
FromXrTime(session_state_changed_event->time));
switch (session_state_changed_event->state) {
case XR_SESSION_STATE_FOCUSED:
app->Focused = true;
break;
case XR_SESSION_STATE_VISIBLE:
app->Focused = false;
break;
case XR_SESSION_STATE_READY:
case XR_SESSION_STATE_STOPPING:
ovrApp_HandleSessionStateChanges(app, session_state_changed_event->state);
break;
default:
break;
}
} break;
default:
ALOGV("xrPollEvent: Unknown event");
break;
}
}
}
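// Usage, as in VR_DrawFrame above: drain all pending events once per frame,
// then skip rendering until the runtime reports an active session:
//
//	ovrApp_HandleXrEvents(&engine->appState);
//	if (engine->appState.SessionActive == GL_FALSE) {
//		return;
//	}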
/*
================================================================================
ovrMatrix4f
================================================================================
*/
ovrMatrix4f ovrMatrix4f_CreateProjectionFov(
const float angleLeft,
const float angleRight,
const float angleUp,
const float angleDown,
const float nearZ,
const float farZ) {
const float tanAngleLeft = tanf(angleLeft);
const float tanAngleRight = tanf(angleRight);
const float tanAngleDown = tanf(angleDown);
const float tanAngleUp = tanf(angleUp);
const float tanAngleWidth = tanAngleRight - tanAngleLeft;
// Set to tanAngleDown - tanAngleUp for a clip space with positive Y
// down (Vulkan). Set to tanAngleUp - tanAngleDown for a clip space with
// positive Y up (OpenGL / D3D / Metal).
const float tanAngleHeight = tanAngleUp - tanAngleDown;
// Set to nearZ for a [-1,1] Z clip space (OpenGL / OpenGL ES).
// Set to zero for a [0,1] Z clip space (Vulkan / D3D / Metal).
const float offsetZ = nearZ;
ovrMatrix4f result;
if (farZ <= nearZ) {
// place the far plane at infinity
result.M[0][0] = 2 / tanAngleWidth;
result.M[0][1] = 0;
result.M[0][2] = (tanAngleRight + tanAngleLeft) / tanAngleWidth;
result.M[0][3] = 0;
result.M[1][0] = 0;
result.M[1][1] = 2 / tanAngleHeight;
result.M[1][2] = (tanAngleUp + tanAngleDown) / tanAngleHeight;
result.M[1][3] = 0;
result.M[2][0] = 0;
result.M[2][1] = 0;
result.M[2][2] = -1;
result.M[2][3] = -(nearZ + offsetZ);
result.M[3][0] = 0;
result.M[3][1] = 0;
result.M[3][2] = -1;
result.M[3][3] = 0;
} else {
// normal projection
result.M[0][0] = 2 / tanAngleWidth;
result.M[0][1] = 0;
result.M[0][2] = (tanAngleRight + tanAngleLeft) / tanAngleWidth;
result.M[0][3] = 0;
result.M[1][0] = 0;
result.M[1][1] = 2 / tanAngleHeight;
result.M[1][2] = (tanAngleUp + tanAngleDown) / tanAngleHeight;
result.M[1][3] = 0;
result.M[2][0] = 0;
result.M[2][1] = 0;
result.M[2][2] = -(farZ + offsetZ) / (farZ - nearZ);
result.M[2][3] = -(farZ * (nearZ + offsetZ)) / (farZ - nearZ);
result.M[3][0] = 0;
result.M[3][1] = 0;
result.M[3][2] = -1;
result.M[3][3] = 0;
}
return result;
}
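// Worked example: a symmetric 90 degree projection with the far plane at
// infinity (farZ <= nearZ selects the infinite-far branch above). The angles
// are half-angles in radians, matching the XrFovf convention from xrLocateViews:
//
//	const float halfFov = MATH_PI * 0.25f; // 45 degrees per side
//	ovrMatrix4f proj = ovrMatrix4f_CreateProjectionFov(
//			-halfFov, halfFov, halfFov, -halfFov, 1.0f, 0.0f);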
ovrMatrix4f ovrMatrix4f_CreateFromQuaternion(const XrQuaternionf* q) {
const float ww = q->w * q->w;
const float xx = q->x * q->x;
const float yy = q->y * q->y;
const float zz = q->z * q->z;
ovrMatrix4f out;
out.M[0][0] = ww + xx - yy - zz;
out.M[0][1] = 2 * (q->x * q->y - q->w * q->z);
out.M[0][2] = 2 * (q->x * q->z + q->w * q->y);
out.M[0][3] = 0;
out.M[1][0] = 2 * (q->x * q->y + q->w * q->z);
out.M[1][1] = ww - xx + yy - zz;
out.M[1][2] = 2 * (q->y * q->z - q->w * q->x);
out.M[1][3] = 0;
out.M[2][0] = 2 * (q->x * q->z - q->w * q->y);
out.M[2][1] = 2 * (q->y * q->z + q->w * q->x);
out.M[2][2] = ww - xx - yy + zz;
out.M[2][3] = 0;
out.M[3][0] = 0;
out.M[3][1] = 0;
out.M[3][2] = 0;
out.M[3][3] = 1;
return out;
}
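// Usage note: expects a unit quaternion and produces a row-major rotation,
// e.g. from a tracked pose (illustrative):
//
//	ovrMatrix4f m = ovrMatrix4f_CreateFromQuaternion(&controller->Pose.orientation);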
/// Use left-multiplication to accumulate transformations.
ovrMatrix4f ovrMatrix4f_Multiply(const ovrMatrix4f* a, const ovrMatrix4f* b) {
ovrMatrix4f out;
out.M[0][0] = a->M[0][0] * b->M[0][0] + a->M[0][1] * b->M[1][0] + a->M[0][2] * b->M[2][0] +
a->M[0][3] * b->M[3][0];
out.M[1][0] = a->M[1][0] * b->M[0][0] + a->M[1][1] * b->M[1][0] + a->M[1][2] * b->M[2][0] +
a->M[1][3] * b->M[3][0];
out.M[2][0] = a->M[2][0] * b->M[0][0] + a->M[2][1] * b->M[1][0] + a->M[2][2] * b->M[2][0] +
a->M[2][3] * b->M[3][0];
out.M[3][0] = a->M[3][0] * b->M[0][0] + a->M[3][1] * b->M[1][0] + a->M[3][2] * b->M[2][0] +
a->M[3][3] * b->M[3][0];
out.M[0][1] = a->M[0][0] * b->M[0][1] + a->M[0][1] * b->M[1][1] + a->M[0][2] * b->M[2][1] +
a->M[0][3] * b->M[3][1];
out.M[1][1] = a->M[1][0] * b->M[0][1] + a->M[1][1] * b->M[1][1] + a->M[1][2] * b->M[2][1] +
a->M[1][3] * b->M[3][1];
out.M[2][1] = a->M[2][0] * b->M[0][1] + a->M[2][1] * b->M[1][1] + a->M[2][2] * b->M[2][1] +
a->M[2][3] * b->M[3][1];
out.M[3][1] = a->M[3][0] * b->M[0][1] + a->M[3][1] * b->M[1][1] + a->M[3][2] * b->M[2][1] +
a->M[3][3] * b->M[3][1];
out.M[0][2] = a->M[0][0] * b->M[0][2] + a->M[0][1] * b->M[1][2] + a->M[0][2] * b->M[2][2] +
a->M[0][3] * b->M[3][2];
out.M[1][2] = a->M[1][0] * b->M[0][2] + a->M[1][1] * b->M[1][2] + a->M[1][2] * b->M[2][2] +
a->M[1][3] * b->M[3][2];
out.M[2][2] = a->M[2][0] * b->M[0][2] + a->M[2][1] * b->M[1][2] + a->M[2][2] * b->M[2][2] +
a->M[2][3] * b->M[3][2];
out.M[3][2] = a->M[3][0] * b->M[0][2] + a->M[3][1] * b->M[1][2] + a->M[3][2] * b->M[2][2] +
a->M[3][3] * b->M[3][2];
out.M[0][3] = a->M[0][0] * b->M[0][3] + a->M[0][1] * b->M[1][3] + a->M[0][2] * b->M[2][3] +
a->M[0][3] * b->M[3][3];
out.M[1][3] = a->M[1][0] * b->M[0][3] + a->M[1][1] * b->M[1][3] + a->M[1][2] * b->M[2][3] +
a->M[1][3] * b->M[3][3];
out.M[2][3] = a->M[2][0] * b->M[0][3] + a->M[2][1] * b->M[1][3] + a->M[2][2] * b->M[2][3] +
a->M[2][3] * b->M[3][3];
out.M[3][3] = a->M[3][0] * b->M[0][3] + a->M[3][1] * b->M[1][3] + a->M[3][2] * b->M[2][3] +
a->M[3][3] * b->M[3][3];
return out;
}
ovrMatrix4f ovrMatrix4f_CreateRotation(const float radiansX, const float radiansY, const float radiansZ) {
const float sinX = sinf(radiansX);
const float cosX = cosf(radiansX);
const ovrMatrix4f rotationX = {
{{1, 0, 0, 0}, {0, cosX, -sinX, 0}, {0, sinX, cosX, 0}, {0, 0, 0, 1}}};
const float sinY = sinf(radiansY);
const float cosY = cosf(radiansY);
const ovrMatrix4f rotationY = {
{{cosY, 0, sinY, 0}, {0, 1, 0, 0}, {-sinY, 0, cosY, 0}, {0, 0, 0, 1}}};
const float sinZ = sinf(radiansZ);
const float cosZ = cosf(radiansZ);
const ovrMatrix4f rotationZ = {
{{cosZ, -sinZ, 0, 0}, {sinZ, cosZ, 0, 0}, {0, 0, 1, 0}, {0, 0, 0, 1}}};
const ovrMatrix4f rotationXY = ovrMatrix4f_Multiply(&rotationY, &rotationX);
return ovrMatrix4f_Multiply(&rotationZ, &rotationXY);
}
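// Composition order: the result is rotationZ * (rotationY * rotationX), so
// pitch (X) is applied first, then yaw (Y), then roll (Z). Example of a pure
// 90 degree yaw (illustrative):
//
//	ovrMatrix4f yaw = ovrMatrix4f_CreateRotation(0.0f, MATH_PI * 0.5f, 0.0f);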
XrVector4f XrVector4f_MultiplyMatrix4f(const ovrMatrix4f* a, const XrVector4f* v) {
XrVector4f out;
out.x = a->M[0][0] * v->x + a->M[0][1] * v->y + a->M[0][2] * v->z + a->M[0][3] * v->w;
out.y = a->M[1][0] * v->x + a->M[1][1] * v->y + a->M[1][2] * v->z + a->M[1][3] * v->w;
out.z = a->M[2][0] * v->x + a->M[2][1] * v->y + a->M[2][2] * v->z + a->M[2][3] * v->w;
out.w = a->M[3][0] * v->x + a->M[3][1] * v->y + a->M[3][2] * v->z + a->M[3][3] * v->w;
return out;
}
/*
================================================================================
ovrTrackedController
================================================================================
*/
void ovrTrackedController_Clear(ovrTrackedController* controller) {
controller->Active = false;
controller->Pose = XrPosef_Identity();
}

View File

@ -9,26 +9,136 @@
# include <SDL_opengles2.h>
#endif
//OpenXR
#define XR_USE_GRAPHICS_API_OPENGL_ES 1
#define XR_USE_PLATFORM_ANDROID 1
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES3/gl3.h>
#include <GLES3/gl3ext.h>
#include <jni.h>
#include <openxr/openxr.h>
#include <openxr/openxr_platform.h>
#include <openxr/openxr_oculus.h>
#include <openxr/openxr_oculus_helpers.h>
#define MATH_PI 3.14159265358979323846f
#define ALOGE(...) printf(__VA_ARGS__)
#define ALOGV(...) printf(__VA_ARGS__)
typedef union {
XrCompositionLayerProjection Projection;
XrCompositionLayerCylinderKHR Cylinder;
} ovrCompositorLayer_Union;
enum { ovrMaxLayerCount = 1 };
enum { ovrMaxNumEyes = 2 };
#define GL(func) func;
#define OXR(func) func;
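// Hedged debug variant, not part of this commit: the pass-through macros above
// swallow every XrResult; a checked build could log failures instead:
//
//	#define OXR(func)                                                        \
//		do {                                                                 \
//			XrResult _res = (func);                                          \
//			if (XR_FAILED(_res)) {                                           \
//				ALOGE("OpenXR error %d at %s:%d", _res, __FILE__, __LINE__); \
//			}                                                                \
//		} while (0)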
typedef struct {
JavaVM* Vm;
jobject ActivityObject;
JNIEnv* Env;
} ovrJava;
typedef struct {
XrSwapchain Handle;
uint32_t Width;
uint32_t Height;
} ovrSwapChain;
typedef struct {
int Width;
int Height;
uint32_t TextureSwapChainLength;
uint32_t TextureSwapChainIndex;
ovrSwapChain ColorSwapChain;
XrSwapchainImageOpenGLESKHR* ColorSwapChainImage;
GLuint* DepthBuffers;
GLuint* FrameBuffers;
} ovrFramebuffer;
typedef struct {
ovrFramebuffer FrameBuffer;
} ovrRenderer;
typedef struct {
GLboolean Active;
XrPosef Pose;
} ovrTrackedController;
typedef struct {
GLboolean Focused;
XrInstance Instance;
XrSession Session;
XrViewConfigurationProperties ViewportConfig;
XrViewConfigurationView ViewConfigurationView[ovrMaxNumEyes];
XrSystemId SystemId;
XrSpace HeadSpace;
XrSpace LocalSpace;
XrSpace StageSpace;
XrSpace FakeStageSpace;
XrSpace CurrentSpace;
GLboolean SessionActive;
float* SupportedDisplayRefreshRates;
uint32_t RequestedDisplayRefreshRateIndex;
uint32_t NumSupportedDisplayRefreshRates;
PFN_xrGetDisplayRefreshRateFB pfnGetDisplayRefreshRate;
PFN_xrRequestDisplayRefreshRateFB pfnRequestDisplayRefreshRate;
int SwapInterval;
// These threads will be marked as performance threads.
int MainThreadTid;
int RenderThreadTid;
ovrCompositorLayer_Union Layers[ovrMaxLayerCount];
int LayerCount;
GLboolean TouchPadDownLastFrame;
ovrRenderer Renderer;
ovrTrackedController TrackedController[4]; // left aim, left grip, right aim, right grip
} ovrApp;
typedef struct {
float M[4][4];
} ovrMatrix4f;
typedef enum ovrButton_ {
ovrButton_A = 0x00000001, // Set for trigger pulled on the Gear VR and Go Controllers
ovrButton_B = 0x00000002,
ovrButton_RThumb = 0x00000004,
ovrButton_RShoulder = 0x00000008,
ovrButton_X = 0x00000100,
ovrButton_Y = 0x00000200,
ovrButton_LThumb = 0x00000400,
ovrButton_LShoulder = 0x00000800,
ovrButton_Up = 0x00010000,
ovrButton_Down = 0x00020000,
ovrButton_Left = 0x00040000,
ovrButton_Right = 0x00080000,
ovrButton_Enter = 0x00100000, //< Set for touchpad click on the Go Controller, menu
// button on Left Quest Controller
ovrButton_Back = 0x00200000, //< Back button on the Go Controller (only set when
// a short press comes up)
ovrButton_GripTrigger = 0x04000000, //< grip trigger engaged
ovrButton_Trigger = 0x20000000, //< Index Trigger engaged
ovrButton_Joystick = 0x80000000, //< Click of the Joystick
ovrButton_EnumSize = 0x7fffffff
} ovrButton;
typedef struct {
uint64_t frameIndex;
ovrApp appState;
ovrJava java;
float predictedDisplayTime;
} engine_t;
typedef enum {
@ -47,4 +157,36 @@ typedef enum {
VRFM_QUERY = 99 //Used to query which mode is active
} vrFollowMode_t;
void ovrApp_Clear(ovrApp* app);
void ovrApp_Destroy(ovrApp* app);
void ovrApp_HandleXrEvents(ovrApp* app);
void ovrFramebuffer_Acquire(ovrFramebuffer* frameBuffer);
void ovrFramebuffer_Resolve(ovrFramebuffer* frameBuffer);
void ovrFramebuffer_Release(ovrFramebuffer* frameBuffer);
void ovrFramebuffer_SetCurrent(ovrFramebuffer* frameBuffer);
void ovrFramebuffer_SetNone();
void ovrRenderer_Create(
XrSession session,
ovrRenderer* renderer,
int suggestedEyeTextureWidth,
int suggestedEyeTextureHeight);
void ovrRenderer_Destroy(ovrRenderer* renderer);
void ovrTrackedController_Clear(ovrTrackedController* controller);
ovrMatrix4f ovrMatrix4f_Multiply(const ovrMatrix4f* a, const ovrMatrix4f* b);
ovrMatrix4f ovrMatrix4f_CreateRotation(const float radiansX, const float radiansY, const float radiansZ);
ovrMatrix4f ovrMatrix4f_CreateFromQuaternion(const XrQuaternionf* q);
ovrMatrix4f ovrMatrix4f_CreateProjectionFov(
const float fovDegreesX,
const float fovDegreesY,
const float offsetX,
const float offsetY,
const float nearZ,
const float farZ);
XrVector4f XrVector4f_MultiplyMatrix4f(const ovrMatrix4f* a, const XrVector4f* v);
#endif

View File

@ -4,9 +4,6 @@
#include <android/log.h>
#include <client/keycodes.h>
#include <qcommon/q_shared.h>
#include <qcommon/qcommon.h>
@ -27,7 +24,7 @@ extern void CON_LogcatFn( void (*LogcatFn)( const char* message ) );
static JNIEnv* g_Env = NULL;
static JavaVM* g_JavaVM = NULL;
static jobject g_ActivityObject = NULL;
static qboolean g_HasFocus = qtrue;
JNIEXPORT void JNICALL Java_com_drbeef_ioq3quest_MainActivity_nativeCreate(JNIEnv* env, jclass cls, jobject thisObject)
{
@ -80,23 +77,22 @@ int main(int argc, char* argv[]) {
Com_Init(args);
NET_Init( );
VR_InitRenderer(engine);
VR_EnterVR(engine, java);
qboolean hasFocus = qtrue;
qboolean paused = qfalse;
while (1) {
if (hasFocus != g_HasFocus) {
hasFocus = g_HasFocus;
if (!hasFocus && VR_isPauseable()) {
Com_QueueEvent( Sys_Milliseconds(), SE_KEY, K_ESCAPE, qtrue, 0, NULL );
//Com_QueueEvent( Sys_Milliseconds(), SE_KEY, K_CONSOLE, qtrue, 0, NULL );
paused = qtrue;
} else if (hasFocus && paused) {
//Com_QueueEvent( Sys_Milliseconds(), SE_KEY, K_CONSOLE, qtrue, 0, NULL );
Com_QueueEvent( Sys_Milliseconds(), SE_KEY, K_ESCAPE, qtrue, 0, NULL );
paused = qfalse;
}
}