mirror of
https://github.com/ZDoom/gzdoom.git
synced 2024-11-11 15:21:51 +00:00
- handle normals for spheremapped mirror surfaces using non-deprecated features.
- move all WGL references out of global header files so that global wgl header include is no longer necessary
This commit is contained in:
parent
ed5ee4e8d1
commit
84a49e37ee
4 changed files with 26 additions and 11 deletions
|
@@ -321,7 +321,13 @@ void GLWall::RenderMirrorSurface()
|
|||
// For the sphere map effect we need a normal of the mirror surface,
|
||||
Vector v(glseg.y2-glseg.y1, 0 ,-glseg.x2+glseg.x1);
|
||||
v.Normalize();
|
||||
glNormal3fv(&v[0]);
|
||||
|
||||
// we use texture coordinates and texture matrix to pass the normal stuff to the shader so that the default vertex buffer format can be used as is.
|
||||
lolft.u = lorgt.u = uplft.u = uprgt.u = v.X();
|
||||
lolft.v = lorgt.v = uplft.v = uprgt.v = v.Z();
|
||||
|
||||
gl_RenderState.EnableTextureMatrix(true);
|
||||
gl_RenderState.mTextureMatrix.computeNormalMatrix(gl_RenderState.mViewMatrix);
|
||||
|
||||
// Use sphere mapping for this
|
||||
gl_RenderState.SetEffect(EFF_SPHEREMAP);
|
||||
|
@@ -338,6 +344,7 @@ void GLWall::RenderMirrorSurface()
|
|||
flags &= ~GLWF_GLOW;
|
||||
RenderWall(RWF_BLANK);
|
||||
|
||||
gl_RenderState.EnableTextureMatrix(false);
|
||||
gl_RenderState.SetEffect(EFF_NONE);
|
||||
|
||||
// Restore the defaults for the translucent pass
|
||||
|
|
|
@@ -1,5 +1,10 @@
|
|||
#include "gl/system/gl_system.h"
|
||||
|
||||
#define DWORD WINDOWS_DWORD
|
||||
#include <GL/wglew.h>
|
||||
#undef DWORD
|
||||
|
||||
|
||||
#include "win32iface.h"
|
||||
#include "win32gliface.h"
|
||||
//#include "gl/gl_intern.h"
|
||||
|
@@ -22,6 +27,12 @@
|
|||
void gl_CalculateCPUSpeed();
|
||||
extern int NewWidth, NewHeight, NewBits, DisplayBits;
|
||||
|
||||
// these get used before GLEW is initialized so we have to use separate pointers with different names
|
||||
PFNWGLCHOOSEPIXELFORMATARBPROC myWglChoosePixelFormatARB; // = (PFNWGLCHOOSEPIXELFORMATARBPROC)wglGetProcAddress("wglChoosePixelFormatARB");
|
||||
PFNWGLCREATECONTEXTATTRIBSARBPROC myWglCreateContextAttribsARB;
|
||||
PFNWGLSWAPINTERVALEXTPROC vsyncfunc;
|
||||
|
||||
|
||||
CUSTOM_CVAR(Int, gl_vid_multisample, 0, CVAR_ARCHIVE | CVAR_GLOBALCONFIG | CVAR_NOINITCALL )
|
||||
{
|
||||
Printf("This won't take effect until "GAMENAME" is restarted.\n");
|
||||
|
@@ -581,8 +592,8 @@ bool Win32GLVideo::SetPixelFormat()
|
|||
hRC = wglCreateContext(hDC);
|
||||
wglMakeCurrent(hDC, hRC);
|
||||
|
||||
wglChoosePixelFormatARB = (PFNWGLCHOOSEPIXELFORMATARBPROC)wglGetProcAddress("wglChoosePixelFormatARB");
|
||||
wglCreateContextAttribsARB = (PFNWGLCREATECONTEXTATTRIBSARBPROC)wglGetProcAddress("wglCreateContextAttribsARB");
|
||||
myWglChoosePixelFormatARB = (PFNWGLCHOOSEPIXELFORMATARBPROC)wglGetProcAddress("wglChoosePixelFormatARB");
|
||||
myWglCreateContextAttribsARB = (PFNWGLCREATECONTEXTATTRIBSARBPROC)wglGetProcAddress("wglCreateContextAttribsARB");
|
||||
// any extra stuff here?
|
||||
|
||||
wglMakeCurrent(NULL, NULL);
|
||||
|
@@ -612,7 +623,7 @@ bool Win32GLVideo::SetupPixelFormat(bool allowsoftware, int multisample)
|
|||
colorDepth = GetDeviceCaps(deskDC, BITSPIXEL);
|
||||
ReleaseDC(GetDesktopWindow(), deskDC);
|
||||
|
||||
if (wglChoosePixelFormatARB)
|
||||
if (myWglChoosePixelFormatARB)
|
||||
{
|
||||
attributes[0] = WGL_RED_BITS_ARB; //bits
|
||||
attributes[1] = 8;
|
||||
|
@@ -662,7 +673,7 @@ bool Win32GLVideo::SetupPixelFormat(bool allowsoftware, int multisample)
|
|||
attributes[24] = 0;
|
||||
attributes[25] = 0;
|
||||
|
||||
if (!wglChoosePixelFormatARB(m_hDC, attributes, attribsFloat, 1, &pixelFormat, &numFormats))
|
||||
if (!myWglChoosePixelFormatARB(m_hDC, attributes, attribsFloat, 1, &pixelFormat, &numFormats))
|
||||
{
|
||||
Printf("R_OPENGL: Couldn't choose pixel format. Retrying in compatibility mode\n");
|
||||
goto oldmethod;
|
||||
|
@@ -736,7 +747,7 @@ bool Win32GLVideo::InitHardware (HWND Window, bool allowsoftware, int multisampl
|
|||
}
|
||||
|
||||
m_hRC = 0;
|
||||
if (wglCreateContextAttribsARB != NULL)
|
||||
if (myWglCreateContextAttribsARB != NULL)
|
||||
{
|
||||
int ctxAttribs[] = {
|
||||
WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
|
||||
|
@@ -746,7 +757,7 @@ bool Win32GLVideo::InitHardware (HWND Window, bool allowsoftware, int multisampl
|
|||
0
|
||||
};
|
||||
|
||||
m_hRC = wglCreateContextAttribsARB(m_hDC, 0, ctxAttribs);
|
||||
m_hRC = myWglCreateContextAttribsARB(m_hDC, 0, ctxAttribs);
|
||||
}
|
||||
if (m_hRC == 0)
|
||||
{
|
||||
|
|
|
@@ -46,8 +46,6 @@ public:
|
|||
void Shutdown();
|
||||
bool SetFullscreen(const char *devicename, int w, int h, int bits, int hz);
|
||||
|
||||
PFNWGLCHOOSEPIXELFORMATARBPROC wglChoosePixelFormatARB; // = (PFNWGLCHOOSEPIXELFORMATARBPROC)wglGetProcAddress("wglChoosePixelFormatARB");
|
||||
PFNWGLCREATECONTEXTATTRIBSARBPROC wglCreateContextAttribsARB;
|
||||
HDC m_hDC;
|
||||
|
||||
protected:
|
||||
|
@@ -107,7 +105,6 @@ public:
|
|||
Win32GLFrameBuffer(void *hMonitor, int width, int height, int bits, int refreshHz, bool fullscreen);
|
||||
virtual ~Win32GLFrameBuffer();
|
||||
|
||||
PFNWGLSWAPINTERVALEXTPROC vsyncfunc;
|
||||
|
||||
// unused but must be defined
|
||||
virtual void Blank ();
|
||||
|
|
|
@@ -52,7 +52,7 @@ void main()
|
|||
|
||||
#ifdef SPHEREMAP
|
||||
vec3 u = normalize(eyeCoordPos.xyz);
|
||||
vec3 n = normalize(gl_NormalMatrix * gl_Normal);
|
||||
vec3 n = normalize(TextureMatrix * vec4(tc.x, 0.0, tc.y, 0.0); // use texture matrix and coordinates for our normal. Since this is only used on walls, the normal's y coordinate is always 0.
|
||||
vec3 r = reflect(u, n);
|
||||
float m = 2.0 * sqrt( r.x*r.x + r.y*r.y + (r.z+1.0)*(r.z+1.0) );
|
||||
vec2 sst = vec2(r.x/m + 0.5, r.y/m + 0.5);
|
||||
|
|
Loading…
Reference in a new issue