Continued W.I.P

Simon 2020-03-01 23:09:29 +00:00
parent b9f86b40cf
commit a562866933
5 changed files with 199 additions and 200 deletions

View file

@@ -151,12 +151,6 @@ bool useScreenLayer()
return useVirtualScreen || forceVirtualScreen;
}
-int runStatus = -1;
-void QzDoom_exit(int exitCode)
-{
-runStatus = exitCode;
-}
static void UnEscapeQuotes( char *arg )
{
char *last = NULL;
@@ -1263,7 +1257,7 @@ int m_height;
//qboolean R_SetMode( void );
-void Android_GetScreenRes(int *width, int *height)
+void Android_GetScreenRes(uint32_t *width, uint32_t *height)
{
if (useScreenLayer())
{
@@ -1325,6 +1319,8 @@ extern void SDL_Android_Init(JNIEnv* env, jclass cls);
static ovrAppThread * gAppThread = NULL;
static ovrApp gAppState;
static ovrJava java;
+static bool destroyed = false;
void RenderFrame()
{
@@ -1401,7 +1397,7 @@ void finishEyeBuffer(int eye )
ovrFramebuffer_SetNone();
}
-bool processMessageQueue(bool destroyed) {
+bool processMessageQueue() {
for ( ; ; )
{
ovrMessage message;
@@ -1465,10 +1461,10 @@ bool processMessageQueue(bool destroyed) {
ovrApp_HandleVrModeChanges( &gAppState );
}
return destroyed;
}
+void vr_main();
+void showLoadingIcon();
void * AppThreadFunction(void * parm ) {
gAppThread = (ovrAppThread *) parm;
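The hunk above drops the destroyed parameter: lifecycle state now lives in the file-scope flag added earlier. A minimal sketch of the resulting shape, assuming the ovrMessageQueue scaffolding (ovrMessageQueue_GetNextMessage, MESSAGE_ON_* ids) from the VrApi samples this port builds on; the message names are illustrative, not the project's actual enum:

    /* names from the VrApi sample scaffolding; illustrative only */
    static bool destroyed = false;   /* file-scope now, set once and remembered */

    bool processMessageQueue() {
        for ( ; ; ) {
            ovrMessage message;
            /* false = do not block; drain whatever is queued this frame */
            if (!ovrMessageQueue_GetNextMessage(&gAppThread->MessageQueue, &message, false))
                break;
            switch (message.Id) {
                case MESSAGE_ON_DESTROY:
                    destroyed = true;   /* previously threaded through as a parameter */
                    break;
                default:
                    break;
            }
            ovrApp_HandleVrModeChanges(&gAppState);
        }
        return destroyed;
    }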
@@ -1503,7 +1499,7 @@ void * AppThreadFunction(void * parm ) {
vrapi_SetPropertyInt(&gAppState.Java, VRAPI_EAT_NATIVE_GAMEPAD_EVENTS, 0);
//Using a symmetrical render target
-m_height = m_width = (int)(vrapi_GetSystemPropertyInt(&java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_WIDTH) * SS_MULTIPLIER);
+cylinderSize[0] = cylinderSize[1] = m_height = m_width = (int)(vrapi_GetSystemPropertyInt(&java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_WIDTH) * SS_MULTIPLIER);
//Use floor based tracking space
vrapi_SetTrackingSpace(gAppState.Ovr, VRAPI_TRACKING_SPACE_LOCAL_FLOOR);
@@ -1519,9 +1515,28 @@ void * AppThreadFunction(void * parm ) {
ovrRenderer_Create(m_width, m_height, &gAppState.Renderer, &java);
if ( gAppState.Ovr == NULL )
{
return NULL;
}
+// Create the scene if not yet created.
+ovrScene_Create( cylinderSize[0], cylinderSize[1], &gAppState.Scene, &java );
chdir("/sdcard/QzDoom");
-vr_main();
+//Run loading loop until we are ready to start QzDoom
+while (!destroyed && !qzdoom_initialised) {
+processMessageQueue();
+incrementFrameIndex();
+showLoadingIcon();
+}
+//Should now be all set up and ready - start the Doom main loop
+D_DoomMain();
+//We are done, shutdown cleanly
+shutdownVR();
+return NULL;
}
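Architecturally this inverts the old flow: vr_main() used to own an infinite frame loop, whereas now AppThreadFunction spins a loading screen until the engine is up, then blocks in D_DoomMain(), which is expected to call vr_main() once per rendered frame (see the gl_oculusquest.cpp changes below). A condensed sketch of the assumed thread lifecycle; the function names are from this diff, the loop structure is paraphrased:

    void * AppThreadFunction(void * parm) {
        /* ... VrApi/EGL/renderer setup elided ... */
        while (!destroyed && !qzdoom_initialised) {
            processMessageQueue();   /* keep Android lifecycle events flowing */
            incrementFrameIndex();   /* advance FrameIndex and DisplayTime */
            showLoadingIcon();       /* submit a compositor-only frame */
        }
        D_DoomMain();                /* blocks; drives vr_main() per frame */
        shutdownVR();                /* tear down renderer, EGL context, VrApi */
        return NULL;
    }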
@@ -1552,7 +1567,8 @@ void processHaptics() {//Handle haptics
}
}
-void showLoadingIcon() {// Show a loading icon.
+void showLoadingIcon()
+{
int frameFlags = 0;
frameFlags |= VRAPI_FRAME_FLAG_FLUSH;
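The hunk is truncated here; for reference, this is the SDK-sample pattern a loading frame follows, built from real VrApi helpers (a black projection layer fills the background, the icon layer spins on top, and VRAPI_FRAME_FLAG_FLUSH shows it without waiting for the pipeline):

    ovrLayerProjection2 blackLayer = vrapi_DefaultLayerBlackProjection2();
    ovrLayerLoadingIcon2 iconLayer = vrapi_DefaultLayerLoadingIcon2();
    const ovrLayerHeader2 * layers[] = { &blackLayer.Header, &iconLayer.Header };

    ovrSubmitFrameDescription2 frameDesc = { 0 };
    frameDesc.Flags = VRAPI_FRAME_FLAG_FLUSH;
    frameDesc.SwapInterval = 1;
    frameDesc.FrameIndex = gAppState.FrameIndex;
    frameDesc.DisplayTime = gAppState.DisplayTime;
    frameDesc.LayerCount = 2;
    frameDesc.Layers = layers;
    vrapi_SubmitFrame2(gAppState.Ovr, &frameDesc);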
@@ -1585,12 +1601,8 @@ void getHMDOrientation(ovrTracking2 *tracking) {//Get orientation
// the new eye images will be displayed. The number of frames predicted ahead
// depends on the pipeline depth of the engine and the synthesis rate.
// The better the prediction, the less black will be pulled in at the edges.
-const double predictedDisplayTime = vrapi_GetPredictedDisplayTime(gAppState.Ovr,
-gAppState.FrameIndex);
-*tracking = vrapi_GetPredictedTracking2(gAppState.Ovr,
-predictedDisplayTime);
+*tracking = vrapi_GetPredictedTracking2(gAppState.Ovr, gAppState.DisplayTime);
-gAppState.DisplayTime = predictedDisplayTime;
// We extract Yaw, Pitch, Roll instead of directly using the orientation
// to allow "additional" yaw manipulation with mouse/controller.
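The refactor splits the predicted-time bookkeeping out of getHMDOrientation(): incrementFrameIndex() (added in the next hunk) now advances FrameIndex and caches DisplayTime, and the tracking query simply consumes it. The intended per-frame ordering, as a sketch; both VrApi calls are real, the surrounding state is the app's own:

    /* incrementFrameIndex() */
    gAppState.FrameIndex++;
    gAppState.DisplayTime = vrapi_GetPredictedDisplayTime(gAppState.Ovr, gAppState.FrameIndex);

    /* getHMDOrientation() */
    ovrTracking2 tracking = vrapi_GetPredictedTracking2(gAppState.Ovr, gAppState.DisplayTime);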
@@ -1605,165 +1617,134 @@ void getHMDOrientation(ovrTracking2 *tracking) {//Get orientation
ALOGV(" HMD-Position: %f, %f, %f", positionHmd.x, positionHmd.y, positionHmd.z);
}
-void vr_main()
+void shutdownVR() {
+ovrRenderer_Destroy( &gAppState.Renderer );
+ovrEgl_DestroyContext( &gAppState.Egl );
+(*java.Vm)->DetachCurrentThread( java.Vm );
+vrapi_Shutdown();
+}
+ovrSubmitFrameDescription2 setupFrameDescriptor(ovrTracking2 *tracking) {
+ovrSubmitFrameDescription2 frameDesc = {0 };
+if (!useScreenLayer()) {
+ovrLayerProjection2 layer = vrapi_DefaultLayerProjection2();
+layer.HeadPose = (*tracking).HeadPose;
+for ( int eye = 0; eye < VRAPI_FRAME_LAYER_EYE_MAX; eye++ )
+{
+ovrFramebuffer * frameBuffer = &gAppState.Renderer.FrameBuffer[gAppState.Renderer.NumBuffers == 1 ? 0 : eye];
+layer.Textures[eye].ColorSwapChain = frameBuffer->ColorTextureSwapChain;
+layer.Textures[eye].SwapChainIndex = frameBuffer->TextureSwapChainIndex;
+ovrMatrix4f projectionMatrix;
+projectionMatrix = ovrMatrix4f_CreateProjectionFov(vrFOV, vrFOV,
+0.0f, 0.0f, 0.1f, 0.0f);
+layer.Textures[eye].TexCoordsFromTanAngles = ovrMatrix4f_TanAngleMatrixFromProjection(&projectionMatrix);
+layer.Textures[eye].TextureRect.x = 0;
+layer.Textures[eye].TextureRect.y = 0;
+layer.Textures[eye].TextureRect.width = 1.0f;
+layer.Textures[eye].TextureRect.height = 1.0f;
+}
+layer.Header.Flags |= VRAPI_FRAME_LAYER_FLAG_CHROMATIC_ABERRATION_CORRECTION;
+// Set up the description for this frame.
+const ovrLayerHeader2 *layers[] =
+{
+&layer.Header
+};
+ovrSubmitFrameDescription2 frameDesc = {};
+frameDesc.Flags = 0;
+frameDesc.SwapInterval = gAppState.SwapInterval;
+frameDesc.FrameIndex = gAppState.FrameIndex;
+frameDesc.DisplayTime = gAppState.DisplayTime;
+frameDesc.LayerCount = 1;
+frameDesc.Layers = layers;
+} else {
+// Set-up the compositor layers for this frame.
+// NOTE: Multiple independent layers are allowed, but they need to be added
+// in a depth consistent order.
+memset( gAppState.Layers, 0, sizeof( ovrLayer_Union2 ) * ovrMaxLayerCount );
+gAppState.LayerCount = 0;
+// Add a simple cylindrical layer
+gAppState.Layers[gAppState.LayerCount++].Cylinder =
+BuildCylinderLayer(&gAppState.Scene.CylinderRenderer,
+gAppState.Scene.CylinderWidth, gAppState.Scene.CylinderHeight, tracking, radians(playerYaw) );
+// Compose the layers for this frame.
+const ovrLayerHeader2 * layerHeaders[ovrMaxLayerCount] = { 0 };
+for ( int i = 0; i < gAppState.LayerCount; i++ )
+{
+layerHeaders[i] = &gAppState.Layers[i].Header;
+}
+// Set up the description for this frame.
+frameDesc.Flags = 0;
+frameDesc.SwapInterval = gAppState.SwapInterval;
+frameDesc.FrameIndex = gAppState.FrameIndex;
+frameDesc.DisplayTime = gAppState.DisplayTime;
+frameDesc.LayerCount = gAppState.LayerCount;
+frameDesc.Layers = layerHeaders;
+}
+return frameDesc;
+}
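A note on the projection built above: ovrMatrix4f_CreateProjectionFov() takes the field of view in degrees, and passing a farZ (0.0f here) no greater than nearZ selects an infinitely far plane in the VrApi helpers. The time warp only needs the tan-angle mapping derived from it, not the depth range:

    ovrMatrix4f proj = ovrMatrix4f_CreateProjectionFov(vrFOV, vrFOV, 0.0f, 0.0f, 0.1f, 0.0f);
    ovrMatrix4f tanAngles = ovrMatrix4f_TanAngleMatrixFromProjection(&proj);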
+void incrementFrameIndex()
+{
-for ( bool destroyed = false; destroyed == false; )
-{
-destroyed = processMessageQueue(destroyed);
+// This is the only place the frame index is incremented, right before
+// calling vrapi_GetPredictedDisplayTime().
+gAppState.FrameIndex++;
+gAppState.DisplayTime = vrapi_GetPredictedDisplayTime(gAppState.Ovr,
+gAppState.FrameIndex);
+}
-if ( gAppState.Ovr == NULL )
-{
-continue;
-}
+void getTrackedRemotesOrientation() {//Get info for tracked remotes
+acquireTrackedRemotesData(gAppState.Ovr, gAppState.DisplayTime);
-// Create the scene if not yet created.
-// The scene is created here to be able to show a loading icon.
-if ( !ovrScene_IsCreated( &gAppState.Scene ) )
-{
-ovrScene_Create( cylinderSize[0], cylinderSize[1], &gAppState.Scene, &java );
-}
-// This is the only place the frame index is incremented, right before
-// calling vrapi_GetPredictedDisplayTime().
-gAppState.FrameIndex++;
-// Create the scene if not yet created.
-// The scene is created here to be able to show a loading icon.
-if (!qzdoom_initialised || runStatus != -1)
-{
-showLoadingIcon();
-}
-processHaptics();
-if (runStatus == -1) {
-ovrTracking2 tracking;
-getHMDOrientation(&tracking);
-//Get info for tracked remotes
-acquireTrackedRemotesData(gAppState.Ovr, gAppState.DisplayTime);
//Call additional control schemes here
// switch ((int)vr_control_scheme->value)
{
//Call additional control schemes here
// switch ((int)vr_control_scheme->value)
{
// case RIGHT_HANDED_DEFAULT:
HandleInput_Default(&rightTrackedRemoteState_new, &rightTrackedRemoteState_old, &rightRemoteTracking_new,
&leftTrackedRemoteState_new, &leftTrackedRemoteState_old, &leftRemoteTracking_new,
ovrButton_A, ovrButton_B, ovrButton_X, ovrButton_Y);
HandleInput_Default(&rightTrackedRemoteState_new, &rightTrackedRemoteState_old, &rightRemoteTracking_new,
&leftTrackedRemoteState_new, &leftTrackedRemoteState_old, &leftRemoteTracking_new,
ovrButton_A, ovrButton_B, ovrButton_X, ovrButton_Y);
// break;
// case LEFT_HANDED_DEFAULT:
// HandleInput_Default(&leftTrackedRemoteState_new, &leftTrackedRemoteState_old, &leftRemoteTracking_new,
// &rightTrackedRemoteState_new, &rightTrackedRemoteState_old, &rightRemoteTracking_new,
// ovrButton_X, ovrButton_Y, ovrButton_A, ovrButton_B);
// break;
}
}
}
-ovrSubmitFrameDescription2 frameDesc = { 0 };
-if (!useScreenLayer()) {
+void submitFrame(ovrSubmitFrameDescription2 *frameDesc)
+{
+// Hand over the eye images to the time warp.
+vrapi_SubmitFrame2(gAppState.Ovr, frameDesc);
+}
-ovrLayerProjection2 layer = vrapi_DefaultLayerProjection2();
-layer.HeadPose = tracking.HeadPose;
-for ( int eye = 0; eye < VRAPI_FRAME_LAYER_EYE_MAX; eye++ )
-{
-ovrFramebuffer * frameBuffer = &gAppState.Renderer.FrameBuffer[gAppState.Renderer.NumBuffers == 1 ? 0 : eye];
-layer.Textures[eye].ColorSwapChain = frameBuffer->ColorTextureSwapChain;
-layer.Textures[eye].SwapChainIndex = frameBuffer->TextureSwapChainIndex;
+//Need to replicate this code in gl_oculusquest.cpp
+void vr_main()
+{
+if (!destroyed)
+{
+processHaptics();
-ovrMatrix4f projectionMatrix;
-projectionMatrix = ovrMatrix4f_CreateProjectionFov(vrFOV, vrFOV,
-0.0f, 0.0f, 0.1f, 0.0f);
+ovrTracking2 tracking;
+getHMDOrientation(&tracking);
+getTrackedRemotesOrientation();
-layer.Textures[eye].TexCoordsFromTanAngles = ovrMatrix4f_TanAngleMatrixFromProjection(&projectionMatrix);
+ovrSubmitFrameDescription2 frameDesc = setupFrameDescriptor(&tracking);
-layer.Textures[eye].TextureRect.x = 0;
-layer.Textures[eye].TextureRect.y = 0;
-layer.Textures[eye].TextureRect.width = 1.0f;
-layer.Textures[eye].TextureRect.height = 1.0f;
-}
-layer.Header.Flags |= VRAPI_FRAME_LAYER_FLAG_CHROMATIC_ABERRATION_CORRECTION;
-//Call the game drawing code
-RenderFrame();
+//Call the game drawing code to populate the cylinder layer texture
+RenderFrame();
-// Set up the description for this frame.
-const ovrLayerHeader2 *layers[] =
-{
-&layer.Header
-};
-ovrSubmitFrameDescription2 frameDesc = {};
-frameDesc.Flags = 0;
-frameDesc.SwapInterval = gAppState.SwapInterval;
-frameDesc.FrameIndex = gAppState.FrameIndex;
-frameDesc.DisplayTime = gAppState.DisplayTime;
-frameDesc.LayerCount = 1;
-frameDesc.Layers = layers;
-// Hand over the eye images to the time warp.
-vrapi_SubmitFrame2(gAppState.Ovr, &frameDesc);
-} else {
-// Set-up the compositor layers for this frame.
-// NOTE: Multiple independent layers are allowed, but they need to be added
-// in a depth consistent order.
-memset( gAppState.Layers, 0, sizeof( ovrLayer_Union2 ) * ovrMaxLayerCount );
-gAppState.LayerCount = 0;
-// Add a simple cylindrical layer
-gAppState.Layers[gAppState.LayerCount++].Cylinder =
-BuildCylinderLayer( &gAppState.Scene.CylinderRenderer,
-gAppState.Scene.CylinderWidth, gAppState.Scene.CylinderHeight, &tracking, radians(playerYaw) );
-//Call the game drawing code to populate the cylinder layer texture
-RenderFrame();
-// Compose the layers for this frame.
-const ovrLayerHeader2 * layerHeaders[ovrMaxLayerCount] = { 0 };
-for ( int i = 0; i < gAppState.LayerCount; i++ )
-{
-layerHeaders[i] = &gAppState.Layers[i].Header;
-}
-// Set up the description for this frame.
-frameDesc.Flags = 0;
-frameDesc.SwapInterval = gAppState.SwapInterval;
-frameDesc.FrameIndex = gAppState.FrameIndex;
-frameDesc.DisplayTime = gAppState.DisplayTime;
-frameDesc.LayerCount = gAppState.LayerCount;
-frameDesc.Layers = layerHeaders;
-// Hand over the eye images to the time warp.
-vrapi_SubmitFrame2(gAppState.Ovr, &frameDesc);
-}
-}
-else
-{
-//We are now shutting down
-if (runStatus == 0)
-{
-//Give us half a second (36 frames)
-shutdownCountdown = 36;
-runStatus++;
-} else if (runStatus == 1)
-{
-if (--shutdownCountdown == 0) {
-runStatus++;
-}
-} else if (runStatus == 2)
-{
-//TODO
-//Host_Shutdown();
-runStatus++;
-} else if (runStatus == 3)
-{
-ovrRenderer_Destroy( &gAppState.Renderer );
-ovrEgl_DestroyContext( &gAppState.Egl );
-(*java.Vm)->DetachCurrentThread( java.Vm );
-vrapi_Shutdown();
-exit( 0 );
-}
-}
+// Hand over the eye images to the time warp.
+submitFrame(&frameDesc);
}
}
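Because the old and new versions interleave heavily in this hunk, here is the new per-frame path reassembled from the additions above (a sketch of the flow, not a verbatim copy of the file):

    void vr_main()
    {
        if (destroyed)
            return;                      /* lifecycle is the app thread's job now */

        processHaptics();

        ovrTracking2 tracking;
        getHMDOrientation(&tracking);    /* predicted pose for gAppState.DisplayTime */
        getTrackedRemotesOrientation();  /* controller state + input handling */

        ovrSubmitFrameDescription2 frameDesc = setupFrameDescriptor(&tracking);

        RenderFrame();                   /* the engine draws into the eye buffers */

        submitFrame(&frameDesc);         /* hand the layers to the time warp */
    }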

View file

@@ -56,6 +56,7 @@ int ducked;
bool player_moving;
+void shutdownVR();
float radians(float deg);
float degrees(float rad);
@@ -68,11 +69,20 @@ void rotateAboutOrigin(float v1, float v2, float rotation, vec2_t out);
void QuatToYawPitchRoll(ovrQuatf q, float pitchAdjust, vec3_t out);
bool useScreenLayer();
void handleTrackedControllerButton(ovrInputStateTrackedRemote * trackedRemoteState, ovrInputStateTrackedRemote * prevTrackedRemoteState, uint32_t button, int key);
+void Android_GetScreenRes(uint32_t *width, uint32_t *height);
+void setUseScreenLayer(bool use);
+void processHaptics();
+void getHMDOrientation(ovrTracking2 *tracking);
+void getTrackedRemotesOrientation();
+void incrementFrameIndex();
+ovrSubmitFrameDescription2 setupFrameDescriptor(ovrTracking2 *tracking);
void prepareEyeBuffer(int eye );
void finishEyeBuffer(int eye );
+void submitFrame(ovrSubmitFrameDescription2 *frameDesc);
#ifdef __cplusplus
} // extern "C"
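All of the new declarations sit inside VrCommon.h's existing extern "C" guard, which is what lets the C++ renderer code (gl_oculusquest.cpp below) link against the C implementations. The pattern, reduced to essentials:

    #ifdef __cplusplus
    extern "C" {
    #endif

    void incrementFrameIndex();
    ovrSubmitFrameDescription2 setupFrameDescriptor(ovrTracking2 *tracking);
    void submitFrame(ovrSubmitFrameDescription2 *frameDesc);

    #ifdef __cplusplus
    }  /* C linkage so the C++ side finds the unmangled symbols */
    #endif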

View file

@@ -29,6 +29,7 @@
#include <string>
#include <map>
#include <cmath>
+#include "gl/system/gl_system.h"
#include "doomtype.h" // Printf
#include "d_player.h"
@@ -51,10 +52,6 @@
#include "QzDoom/VrCommon.h"
-extern "C" {
-#include <VrApi.h>
-#include <VrApi_Helpers.h>
-}
EXTERN_CVAR(Int, screenblocks);
EXTERN_CVAR(Float, movebob);
@@ -153,35 +150,13 @@ namespace s3d
bool OculusQuestEyePose::submitFrame() const
{
// Copy HDR game texture to local vr LDR framebuffer, so gamma correction could work
-/* if (eyeTexture->handle == nullptr) {
-glGenFramebuffers(1, &framebuffer);
-glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
+prepareEyeBuffer( eye );
-GLuint texture;
-glGenTextures(1, &texture);
-eyeTexture->handle = (void *)(std::ptrdiff_t)texture;
-glBindTexture(GL_TEXTURE_2D, texture);
-glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
-glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
-glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, GLRenderer->mSceneViewport.width,
-GLRenderer->mSceneViewport.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr);
-glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
-GLenum drawBuffers[1] = {GL_COLOR_ATTACHMENT0};
-glDrawBuffers(1, drawBuffers);
-}
-glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
-if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
-return false;
-GLRenderer->mBuffers->BindEyeTexture(eye, 0);
-GL_IRECT box = {0, 0, GLRenderer->mSceneViewport.width, GLRenderer->mSceneViewport.height};
-GLRenderer->DrawPresentTexture(box, true);
-// Maybe this would help with AMD boards?
-glBindFramebuffer(GL_FRAMEBUFFER, 0);
-glBindTexture(GL_TEXTURE_2D, 0);
-*/
+finishEyeBuffer( eye );
return true;
}
@@ -250,9 +225,9 @@ namespace s3d
const Stereo3DMode * mode3d = &Stereo3DMode::getCurrentMode();
if (mode3d->IsMono())
return;
-const OculusQuestMode * openVrMode = static_cast<const OculusQuestMode *>(mode3d);
-if (openVrMode
-&& openVrMode->crossHairDrawer
+const OculusQuestMode * oculusQuestMode = static_cast<const OculusQuestMode *>(mode3d);
+if (oculusQuestMode
+&& oculusQuestMode->crossHairDrawer
// Don't draw the crosshair if there is none
&& CrosshairImage != NULL
&& gamestate != GS_TITLELEVEL
@@ -266,7 +241,7 @@ namespace s3d
false,
0.0);
gl_RenderState.ApplyMatrices();
-openVrMode->crossHairDrawer->Draw();
+oculusQuestMode->crossHairDrawer->Draw();
}
// Update HUD matrix to render on a separate quad
@@ -290,6 +265,14 @@ namespace s3d
gl_RenderState.ApplyMatrices();
}
+/* static */
+const Stereo3DMode& OculusQuestMode::getInstance()
+{
+static OculusQuestMode instance;
+return instance;
+}
OculusQuestMode::OculusQuestMode()
: leftEyeView(0)
, rightEyeView(1)
@@ -299,9 +282,8 @@ namespace s3d
{
eye_ptrs.Push(&leftEyeView); // initially default behavior to Mono non-stereo rendering
//Get this from my code
//vrSystem->GetRecommendedRenderTargetSize(&sceneWidth, &sceneHeight);
Android_GetScreenRes(&sceneWidth, &sceneHeight);
leftEyeView.initialize();
rightEyeView.initialize();
@@ -449,6 +431,13 @@ namespace s3d
leftEyeView.submitFrame();
rightEyeView.submitFrame();
+submitFrame(&frameDesc);
}
+static int mAngleFromRadians(double radians)
+{
+double m = std::round(65535.0 * radians / (2.0 * M_PI));
+return int(m);
+}
void OculusQuestMode::updateHmdPose(
@@ -470,7 +459,7 @@ namespace s3d
havePreviousYaw = true;
}
hmdYawDelta = hmdYaw - previousHmdYaw;
-// G_AddViewAngle(mAngleFromRadians(-hmdYawDelta));
+G_AddViewAngle(mAngleFromRadians(-hmdYawDelta));
previousHmdYaw = hmdYaw;
}
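For scale: Doom counts 65536 angle units per revolution, so mAngleFromRadians maps a yaw delta of 0.01 rad to round(65535 * 0.01 / (2*pi)) = 104 units, roughly 0.57 degrees, which matches 0.01 rad. (The constant 65535 rather than 65536 is as committed.) The negation suggests the HMD and Doom yaw conventions wind in opposite directions. A worked check:

    // hmdYawDelta = 0.01 rad
    // 65535.0 * 0.01 / (2.0 * M_PI) = 104.30  -> round -> 104 units
    // 104 / 65536.0 * 360.0 = 0.571 degrees   (0.01 rad = 0.573 degrees)
    G_AddViewAngle(mAngleFromRadians(-hmdYawDelta));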
@@ -484,7 +473,7 @@ namespace s3d
// hmdPitchInDoom
-hmdpitch
- viewPitchInDoom;
-// G_AddViewPitch(mAngleFromRadians(-dPitch));
+G_AddViewPitch(mAngleFromRadians(-dPitch));
}
// Roll can be local, because it doesn't affect gameplay.
@@ -659,7 +648,15 @@ namespace s3d
setUseScreenLayer(true);
}
+processHaptics();
+//Get controller state here
+ovrTracking2 tracking;
+getHMDOrientation(&tracking);
+getTrackedRemotesOrientation();
+frameDesc = setupFrameDescriptor(&tracking);
/* player_t* player = r_viewpoint.camera ? r_viewpoint.camera->player : nullptr;
{

View file

@@ -31,6 +31,12 @@
#include "gl_stereo3d.h"
#include "gl_stereo_leftright.h"
+extern "C" {
+#include <VrApi.h>
+#include <VrApi_Types.h>
+#include <VrApi_Helpers.h>
+}
/* stereoscopic 3D API */
namespace s3d {
@@ -100,6 +106,7 @@ protected:
mutable int cachedViewwidth, cachedViewheight, cachedViewwindowx, cachedViewwindowy;
mutable F2DDrawer * cached2DDrawer;
mutable F2DDrawer * crossHairDrawer;
+mutable ovrSubmitFrameDescription2 frameDesc;
private:
typedef Stereo3DMode super;
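frameDesc is declared mutable because the stereo-mode hooks that fill it in (the SetUp path above builds it via setupFrameDescriptor) and the present path that consumes it run through const-qualified methods of the Stereo3DMode interface. A minimal illustration of the idiom with a stand-in type, not the real class:

    struct FrameDesc { int LayerCount; };         // stand-in for ovrSubmitFrameDescription2

    class Mode {
    public:
        void setUp() const   { frameDesc.LayerCount = 1; }    // built in one const hook...
        int  present() const { return frameDesc.LayerCount; } // ...consumed in another
    private:
        mutable FrameDesc frameDesc;              // mutable: per-frame cache across const calls
    };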

View file

@@ -31,11 +31,12 @@
#include "gl/stereo3d/gl_quadstereo.h"
#include "gl/stereo3d/gl_sidebyside3d.h"
#include "gl/stereo3d/gl_interleaved3d.h"
+#include "gl/stereo3d/gl_oculusquest.h"
#include "gl/system/gl_cvars.h"
#include "version.h"
// Set up 3D-specific console variables:
-CVAR(Int, vr_mode, 10, CVAR_GLOBALCONFIG)
+CVAR(Int, vr_mode, 15, CVAR_GLOBALCONFIG)
// switch left and right eye views
CVAR(Bool, vr_swap_eyes, false, CVAR_GLOBALCONFIG)
@@ -83,7 +84,7 @@ void Stereo3DMode::setCurrentMode(const Stereo3DMode& mode) {
/* static */
const Stereo3DMode& Stereo3DMode::getCurrentMode()
{
-if (gl.legacyMode) vr_mode = 0; // GL 2 does not support this feature.
+//if (gl.legacyMode) vr_mode = 0; // GL 2 does not support this feature.
// NOTE: Ensure that these vr_mode values correspond to the ones in wadsrc/static/menudef.z
switch (vr_mode)
@@ -117,7 +118,7 @@ const Stereo3DMode& Stereo3DMode::getCurrentMode()
// TODO: 8: Oculus Rift
case 9:
setCurrentMode(AmberBlue::getInstance(vr_ipd));
-break;
+break;
// TODO: 10: HTC Vive/OpenVR
case 11:
setCurrentMode(TopBottom3D::getInstance(vr_ipd));
@@ -131,6 +132,9 @@ const Stereo3DMode& Stereo3DMode::getCurrentMode()
case 14:
setCurrentMode(CheckerInterleaved3D::getInstance(vr_ipd));
break;
+case 15:
+setCurrentMode(OculusQuestMode::getInstance());
+break;
case 0:
default:
setCurrentMode(MonoView::getInstance());
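With the new case wired up, the Quest path is reachable through the existing cvar, and the default above now points at it. A usage sketch:

    // console:        vr_mode 15
    // or in the ini:  vr_mode=15
    // both route getCurrentMode() to OculusQuestMode::getInstance()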