From 84a49e37eed36e939e05536bcd9859a751452b2b Mon Sep 17 00:00:00 2001
From: Christoph Oelckers
Date: Mon, 14 Jul 2014 19:54:07 +0200
Subject: [PATCH] - handle normals for spheremapped mirror surfaces using
 non-deprecated features.

- move all WGL references out of global header files so that a global wgl
  header include is no longer necessary
---
 src/gl/scene/gl_walls_draw.cpp     |  9 ++++++++-
 src/win32/win32gliface.cpp         | 23 +++++++++++++++++------
 src/win32/win32gliface.h           |  3 ---
 wadsrc/static/shaders/glsl/main.vp |  2 +-
 4 files changed, 26 insertions(+), 11 deletions(-)

diff --git a/src/gl/scene/gl_walls_draw.cpp b/src/gl/scene/gl_walls_draw.cpp
index aabec775e4..b932691de4 100644
--- a/src/gl/scene/gl_walls_draw.cpp
+++ b/src/gl/scene/gl_walls_draw.cpp
@@ -321,7 +321,13 @@ void GLWall::RenderMirrorSurface()
 	// For the sphere map effect we need a normal of the mirror surface,
 	Vector v(glseg.y2-glseg.y1, 0, -glseg.x2+glseg.x1);
 	v.Normalize();
-	glNormal3fv(&v[0]);
+
+	// Pass the normal to the shader through the texture coordinates and the texture matrix, so that the default vertex buffer format can be used as-is.
+	lolft.u = lorgt.u = uplft.u = uprgt.u = v.X();
+	lolft.v = lorgt.v = uplft.v = uprgt.v = v.Z();
+
+	gl_RenderState.EnableTextureMatrix(true);
+	gl_RenderState.mTextureMatrix.computeNormalMatrix(gl_RenderState.mViewMatrix);
 
 	// Use sphere mapping for this
 	gl_RenderState.SetEffect(EFF_SPHEREMAP);
@@ -338,6 +344,7 @@ void GLWall::RenderMirrorSurface()
 	flags &= ~GLWF_GLOW;
 	RenderWall(RWF_BLANK);
 
+	gl_RenderState.EnableTextureMatrix(false);
 	gl_RenderState.SetEffect(EFF_NONE);
 
 	// Restore the defaults for the translucent pass
diff --git a/src/win32/win32gliface.cpp b/src/win32/win32gliface.cpp
index 5623b7062d..7b70e91722 100644
--- a/src/win32/win32gliface.cpp
+++ b/src/win32/win32gliface.cpp
@@ -1,5 +1,10 @@
 #include "gl/system/gl_system.h"
+#define DWORD WINDOWS_DWORD
+#include <wglext.h>
+#undef DWORD
+
+
 #include "win32iface.h"
 #include "win32gliface.h"
 //#include "gl/gl_intern.h"
 
@@ -22,6 +27,12 @@ void gl_CalculateCPUSpeed();
 
 extern int NewWidth, NewHeight, NewBits, DisplayBits;
 
+// these get used before GLEW is initialized, so we have to use separate pointers with different names
+PFNWGLCHOOSEPIXELFORMATARBPROC myWglChoosePixelFormatARB; // = (PFNWGLCHOOSEPIXELFORMATARBPROC)wglGetProcAddress("wglChoosePixelFormatARB");
+PFNWGLCREATECONTEXTATTRIBSARBPROC myWglCreateContextAttribsARB;
+PFNWGLSWAPINTERVALEXTPROC vsyncfunc;
+
+
 CUSTOM_CVAR(Int, gl_vid_multisample, 0, CVAR_ARCHIVE | CVAR_GLOBALCONFIG | CVAR_NOINITCALL )
 {
 	Printf("This won't take effect until "GAMENAME" is restarted.\n");
@@ -581,8 +592,8 @@ bool Win32GLVideo::SetPixelFormat()
 	hRC = wglCreateContext(hDC);
 	wglMakeCurrent(hDC, hRC);
 
-	wglChoosePixelFormatARB = (PFNWGLCHOOSEPIXELFORMATARBPROC)wglGetProcAddress("wglChoosePixelFormatARB");
-	wglCreateContextAttribsARB = (PFNWGLCREATECONTEXTATTRIBSARBPROC)wglGetProcAddress("wglCreateContextAttribsARB");
+	myWglChoosePixelFormatARB = (PFNWGLCHOOSEPIXELFORMATARBPROC)wglGetProcAddress("wglChoosePixelFormatARB");
+	myWglCreateContextAttribsARB = (PFNWGLCREATECONTEXTATTRIBSARBPROC)wglGetProcAddress("wglCreateContextAttribsARB");
 
 	// any extra stuff here?
 	wglMakeCurrent(NULL, NULL);
@@ -612,7 +623,7 @@ bool Win32GLVideo::SetupPixelFormat(bool allowsoftware, int multisample)
 	colorDepth = GetDeviceCaps(deskDC, BITSPIXEL);
 	ReleaseDC(GetDesktopWindow(), deskDC);
 
-	if (wglChoosePixelFormatARB)
+	if (myWglChoosePixelFormatARB)
 	{
 		attributes[0] = WGL_RED_BITS_ARB; //bits
 		attributes[1] = 8;
@@ -662,7 +673,7 @@ bool Win32GLVideo::SetupPixelFormat(bool allowsoftware, int multisample)
 	attributes[24] = 0;
 	attributes[25] = 0;
 
-	if (!wglChoosePixelFormatARB(m_hDC, attributes, attribsFloat, 1, &pixelFormat, &numFormats))
+	if (!myWglChoosePixelFormatARB(m_hDC, attributes, attribsFloat, 1, &pixelFormat, &numFormats))
 	{
 		Printf("R_OPENGL: Couldn't choose pixel format. Retrying in compatibility mode\n");
 		goto oldmethod;
@@ -736,7 +747,7 @@ bool Win32GLVideo::InitHardware (HWND Window, bool allowsoftware, int multisample)
 	}
 
 	m_hRC = 0;
-	if (wglCreateContextAttribsARB != NULL)
+	if (myWglCreateContextAttribsARB != NULL)
 	{
 		int ctxAttribs[] = {
 			WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
@@ -746,7 +757,7 @@ bool Win32GLVideo::InitHardware (HWND Window, bool allowsoftware, int multisample)
 			0
 		};
 
-		m_hRC = wglCreateContextAttribsARB(m_hDC, 0, ctxAttribs);
+		m_hRC = myWglCreateContextAttribsARB(m_hDC, 0, ctxAttribs);
 	}
 	if (m_hRC == 0)
 	{
diff --git a/src/win32/win32gliface.h b/src/win32/win32gliface.h
index 4457c53723..f39a9f70b5 100644
--- a/src/win32/win32gliface.h
+++ b/src/win32/win32gliface.h
@@ -46,8 +46,6 @@ public:
 	void Shutdown();
 	bool SetFullscreen(const char *devicename, int w, int h, int bits, int hz);
 
-	PFNWGLCHOOSEPIXELFORMATARBPROC wglChoosePixelFormatARB; // = (PFNWGLCHOOSEPIXELFORMATARBPROC)wglGetProcAddress("wglChoosePixelFormatARB");
-	PFNWGLCREATECONTEXTATTRIBSARBPROC wglCreateContextAttribsARB;
 	HDC m_hDC;
 
 protected:
@@ -107,7 +105,6 @@ public:
 	Win32GLFrameBuffer(void *hMonitor, int width, int height, int bits, int refreshHz, bool fullscreen);
 	virtual ~Win32GLFrameBuffer();
 
-	PFNWGLSWAPINTERVALEXTPROC vsyncfunc;
 
 	// unused but must be defined
 	virtual void Blank ();
diff --git a/wadsrc/static/shaders/glsl/main.vp b/wadsrc/static/shaders/glsl/main.vp
index f0bbdd413a..58ca931fb2 100644
--- a/wadsrc/static/shaders/glsl/main.vp
+++ b/wadsrc/static/shaders/glsl/main.vp
@@ -52,7 +52,7 @@ void main()
 
 #ifdef SPHEREMAP
 		vec3 u = normalize(eyeCoordPos.xyz);
-		vec3 n = normalize(gl_NormalMatrix * gl_Normal);
+		vec3 n = normalize((TextureMatrix * vec4(tc.x, 0.0, tc.y, 0.0)).xyz);	// use the texture matrix and coordinates for the normal; this path is only used on walls, so the normal's y component is always 0.
 		vec3 r = reflect(u, n);
 		float m = 2.0 * sqrt( r.x*r.x + r.y*r.y + (r.z+1.0)*(r.z+1.0) );
 		vec2 sst = vec2(r.x/m + 0.5, r.y/m + 0.5);
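
For context on the shader change: the patch smuggles the wall normal into the shader through the otherwise unused texture coordinates, and `computeNormalMatrix()` then has to supply what the deprecated `gl_NormalMatrix` built-in used to provide: the inverse transpose of the view matrix's upper-left 3x3. The patch does not show that function's body, so below is a minimal self-contained sketch of the computation under stated assumptions; the name `ComputeNormalMatrix`, the OpenGL-style column-major `float[16]` layout, and the degeneracy guard are illustrative, not GZDoom's actual implementation.

```cpp
#include <cmath>

// Sketch: build the classic "normal matrix" -- the inverse transpose of the
// upper-left 3x3 of the (model)view matrix -- stored back into a 4x4.
// Assumes OpenGL column-major storage: element (row r, col c) = m[c*4 + r].
// Hypothetical helper; not GZDoom's computeNormalMatrix().
void ComputeNormalMatrix(const float view[16], float out[16])
{
    // Upper-left 3x3, written as rows (a b c / d e f / g h i).
    const float a = view[0], b = view[4], c = view[8];
    const float d = view[1], e = view[5], f = view[9];
    const float g = view[2], h = view[6], i = view[10];

    // Cofactors of the first row, reused for the determinant expansion.
    const float c00 =  (e * i - f * h);
    const float c01 = -(d * i - f * g);
    const float c02 =  (d * h - e * g);

    float det = a * c00 + b * c01 + c * c02;
    if (std::fabs(det) < 1e-12f) det = 1e-12f;  // guard against a degenerate matrix
    const float s = 1.0f / det;

    // inverse(M) = transpose(cofactor(M)) / det, so the inverse *transpose*
    // is simply the cofactor matrix scaled by 1/det.
    out[0] = c00 * s;               out[4] = c01 * s;               out[8]  = c02 * s;               out[12] = 0.0f;
    out[1] = -(b * i - c * h) * s;  out[5] =  (a * i - c * g) * s;  out[9]  = -(a * h - b * g) * s;  out[13] = 0.0f;
    out[2] =  (b * f - c * e) * s;  out[6] = -(a * f - c * d) * s;  out[10] =  (a * e - b * d) * s;  out[14] = 0.0f;
    out[3] = 0.0f;                  out[7] = 0.0f;                  out[11] = 0.0f;                  out[15] = 1.0f;
}
```

With such a matrix bound as `TextureMatrix`, the vertex shader's `TextureMatrix * vec4(tc.x, 0.0, tc.y, 0.0)` (w = 0, so the translation column is ignored) rotates the reconstructed wall normal into eye space, matching what `gl_NormalMatrix * gl_Normal` did in the fixed-function path.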