- enable core profile by default on GL 3.x.

After doing some profiling, it was very obvious that this has better performance than client arrays. Persistent buffers are still better, though, especially for handling dynamic lights.
Christoph Oelckers 2016-08-29 11:33:20 +02:00
parent 0f0dc2c852
commit 8f535997f8
5 changed files with 14 additions and 66 deletions
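For context, the three buffer methods mentioned in the commit message differ in how vertex data reaches the GPU: a persistently mapped buffer (GL_ARB_buffer_storage) is written through a pointer that stays valid for the lifetime of the buffer, the "deferred" method is an ordinary GL_STREAM_DRAW buffer that is re-uploaded before drawing, and client arrays keep the data in CPU memory. The sketch below illustrates the two buffered paths only; it is generic OpenGL code, not taken from GZDoom, and the names Vertex, setupPersistent, setupStreamed and uploadStreamed are invented for the example (a GL function loader such as GLLoadGen or glad is assumed to provide the entry points).

// Generic illustration of the two buffered vertex paths (not GZDoom code).
// Assumes a GL function loader has already been initialized.
struct Vertex { float x, y, z, u, v; };
static const GLsizeiptr BUFFER_BYTES = 1024 * 1024;

// Persistent path: immutable storage, mapped once, written directly each frame.
Vertex *setupPersistent(GLuint &vbo)
{
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    GLbitfield flags = GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT;
    glBufferStorage(GL_ARRAY_BUFFER, BUFFER_BYTES, nullptr, flags);
    return (Vertex *)glMapBufferRange(GL_ARRAY_BUFFER, 0, BUFFER_BYTES, flags);
}

// Deferred/streamed path: mutable storage, filled from a CPU-side copy before drawing.
void setupStreamed(GLuint &vbo)
{
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, BUFFER_BYTES, nullptr, GL_STREAM_DRAW);
}

void uploadStreamed(GLuint vbo, const Vertex *data, GLsizeiptr count)
{
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    // Orphan the old storage so the driver does not stall on draws still in flight.
    glBufferData(GL_ARRAY_BUFFER, BUFFER_BYTES, nullptr, GL_STREAM_DRAW);
    glBufferSubData(GL_ARRAY_BUFFER, 0, count * sizeof(Vertex), data);
}

The persistent path avoids both the extra CPU-side copy and the re-upload of the streamed path, which matches the profiling result described above.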


@@ -148,6 +148,7 @@ FFlatVertexBuffer::FFlatVertexBuffer(int width, int height)
         glBindBuffer(GL_ARRAY_BUFFER, vbo_id);
         glBufferStorage(GL_ARRAY_BUFFER, bytesize, NULL, GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT);
         map = (FFlatVertex*)glMapBufferRange(GL_ARRAY_BUFFER, 0, bytesize, GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT | GL_MAP_COHERENT_BIT);
+        DPrintf(DMSG_NOTIFY, "Using persistent buffer\n");
         break;
     }
@@ -157,13 +158,14 @@ FFlatVertexBuffer::FFlatVertexBuffer(int width, int height)
         glBindBuffer(GL_ARRAY_BUFFER, vbo_id);
         glBufferData(GL_ARRAY_BUFFER, bytesize, NULL, GL_STREAM_DRAW);
         map = nullptr;
+        DPrintf(DMSG_NOTIFY, "Using deferred buffer\n");
         break;
     }
     case BM_CLIENTARRAY:
     {
-        // The fallback path uses immediate mode rendering and does not set up an actual vertex buffer
         map = new FFlatVertex[BUFFER_SIZE];
+        DPrintf(DMSG_NOTIFY, "Using client array buffer\n");
         break;
     }
     }


@@ -130,7 +130,7 @@ void FGLRenderer::Initialize(int width, int height)
     m2DDrawer = new F2DDrawer;
     // Only needed for the core profile, because someone decided it was a good idea to remove the default VAO.
-    if (gl.version >= 4.0)
+    if (gl.buffermethod != BM_CLIENTARRAY)
     {
         glGenVertexArrays(1, &mVAOID);
         glBindVertexArray(mVAOID);
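As the comment in this hunk notes, a core profile context has no default vertex array object: vertex attribute specification and draw calls require a VAO to be bound and raise GL_INVALID_OPERATION otherwise. A minimal generic illustration (not part of this commit; the helper name is invented):

// Core profile: a VAO must be bound before vertex attribute state is set up or used.
static GLuint createVertexState(GLuint vbo)
{
    GLuint vao = 0;
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);      // without this, the calls below fail in a core profile
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (const void *)0);
    return vao;
}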


@@ -156,14 +156,6 @@ void gl_LoadExtensions()
     gl.version = strtod(version, NULL) + 0.01f;
-    bool iscore = false;
-    if (gl.version >= 3.2)
-    {
-        int v;
-        glGetIntegerv(GL_CONTEXT_PROFILE_MASK, &v);
-        iscore = !!(v & GL_CONTEXT_CORE_PROFILE_BIT);
-    }
     // Don't even start if it's lower than 2.0 or no framebuffers are available
     if ((gl.version < 2.0 || !CheckExtension("GL_EXT_framebuffer_object")) && gl.version < 3.0)
     {
@@ -186,11 +178,8 @@ void gl_LoadExtensions()
     if (gl.version > 3.0f && (gl.version >= 3.3f || CheckExtension("GL_ARB_uniform_buffer_object")))
     {
         gl.lightmethod = LM_DEFERRED;
-        if (iscore)
-        {
-            gl.buffermethod = BM_DEFERRED;
-        }
+        gl.buffermethod = BM_DEFERRED;
     }
     if (CheckExtension("GL_ARB_texture_compression")) gl.flags |= RFL_TEXTURE_COMPRESSION;
     if (CheckExtension("GL_EXT_texture_compression_s3tc")) gl.flags |= RFL_TEXTURE_COMPRESSION_S3TC;


@@ -737,42 +737,6 @@ bool Win32GLVideo::SetupPixelFormat(int multisample)
 //
 //==========================================================================
-// since we cannot use the extension loader here, before it gets initialized,
-// we have to define the extended GL stuff we need, ourselves here.
-// The headers generated by GLLoadGen only work if the loader gets initialized.
-typedef const GLubyte * (APIENTRY *PFNGLGETSTRINGIPROC)(GLenum, GLuint);
-#define GL_NUM_EXTENSIONS 0x821D
-bool Win32GLVideo::checkCoreUsability()
-{
-    const char *version = Args->CheckValue("-glversion");
-    if (version != NULL)
-    {
-        if (strtod(version, NULL) < 4.0) return false;
-    }
-    if (Args->CheckParm("-noshader")) return false;
-    // GL 4.4 implies GL_ARB_buffer_storage
-    if (strcmp((char*)glGetString(GL_VERSION), "4.4") >= 0) return true;
-    // at this point the extension loader has not been initialized so we have to retrieve glGetStringi ourselves.
-    PFNGLGETSTRINGIPROC myglGetStringi = (PFNGLGETSTRINGIPROC)wglGetProcAddress("glGetStringi");
-    if (!myglGetStringi) return false;  // this should not happen.
-    const char *extension;
-    int max = 0;
-    glGetIntegerv(GL_NUM_EXTENSIONS, &max);
-    // step through all reported extensions and see if we got what we need...
-    for (int i = 0; i < max; i++)
-    {
-        extension = (const char*)myglGetStringi(GL_EXTENSIONS, i);
-        if (!strcmp(extension, "GL_ARB_buffer_storage")) return true;
-    }
-    return false;
-}
 bool Win32GLVideo::InitHardware (HWND Window, int multisample)
 {
     m_Window=Window;
@@ -783,7 +747,14 @@ bool Win32GLVideo::InitHardware (HWND Window, int multisample)
         return false;
     }
-    for (int prof = WGL_CONTEXT_CORE_PROFILE_BIT_ARB; prof <= WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB; prof++)
+    int prof = WGL_CONTEXT_CORE_PROFILE_BIT_ARB;
+    const char *lm = Args->CheckValue("-buffermethod");
+    if (lm != NULL)
+    {
+        if (!stricmp(lm, "clientarray")) prof = WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB;
+    }
+    for (; prof <= WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB; prof++)
     {
         m_hRC = NULL;
         if (myWglCreateContextAttribsARB != NULL)
@@ -802,8 +773,6 @@ bool Win32GLVideo::InitHardware (HWND Window, int multisample)
                 0
             };
-            //Printf("Trying to create an OpenGL %d.%d %s profile context\n", versions[i] / 10, versions[i] % 10, prof == WGL_CONTEXT_CORE_PROFILE_BIT_ARB ? "Core" : "Compatibility");
             m_hRC = myWglCreateContextAttribsARB(m_hDC, 0, ctxAttribs);
             if (m_hRC != NULL) break;
         }
@@ -811,7 +780,6 @@ bool Win32GLVideo::InitHardware (HWND Window, int multisample)
         if (m_hRC == NULL && prof == WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB)
         {
             m_hRC = wglCreateContext(m_hDC);
             if (m_hRC == NULL)
             {
@@ -823,19 +791,9 @@ bool Win32GLVideo::InitHardware (HWND Window, int multisample)
         if (m_hRC != NULL)
         {
             wglMakeCurrent(m_hDC, m_hRC);
-            // we can only use core profile contexts if GL_ARB_buffer_storage is supported or GL version is >= 4.4
-            if (prof == WGL_CONTEXT_CORE_PROFILE_BIT_ARB && !checkCoreUsability())
-            {
-                wglMakeCurrent(0, 0);
-                wglDeleteContext(m_hRC);
-            }
-            else
-            {
-                return true;
-            }
+            return true;
         }
     }
     // We get here if the driver doesn't support the modern context creation API which always means an old driver.
     I_Error ("R_OPENGL: Unable to create an OpenGL render context. Insufficient driver support for context creation\n");
     return false;
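The ctxAttribs array elided in the hunk above is a standard WGL_ARB_create_context attribute list. The sketch below shows the general shape of such a context request, including the explicit profile selection this commit now drives from -buffermethod. It is a generic illustration, not GZDoom's code: createContextForProfile is an invented helper, the tokens are normally supplied by wglext.h, and wglGetProcAddress only resolves the entry point once a (dummy) context is already current.

#include <windows.h>
#include <GL/gl.h>

// Tokens from WGL_ARB_create_context(_profile); normally provided by wglext.h.
#define WGL_CONTEXT_MAJOR_VERSION_ARB             0x2091
#define WGL_CONTEXT_MINOR_VERSION_ARB             0x2092
#define WGL_CONTEXT_PROFILE_MASK_ARB              0x9126
#define WGL_CONTEXT_CORE_PROFILE_BIT_ARB          0x00000001
#define WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB 0x00000002

typedef HGLRC (WINAPI *PFNWGLCREATECONTEXTATTRIBSARBPROC)(HDC, HGLRC, const int *);

// Invented helper: requests a specific GL version and profile, falling back to a
// legacy context when the modern creation API is not available.
HGLRC createContextForProfile(HDC dc, int major, int minor, int profileBit)
{
    PFNWGLCREATECONTEXTATTRIBSARBPROC myWglCreateContextAttribsARB =
        (PFNWGLCREATECONTEXTATTRIBSARBPROC)wglGetProcAddress("wglCreateContextAttribsARB");
    if (myWglCreateContextAttribsARB == NULL)
    {
        return wglCreateContext(dc); // old driver: legacy (compatibility) context only
    }
    const int ctxAttribs[] =
    {
        WGL_CONTEXT_MAJOR_VERSION_ARB, major,
        WGL_CONTEXT_MINOR_VERSION_ARB, minor,
        WGL_CONTEXT_PROFILE_MASK_ARB,  profileBit, // core or compatibility bit, as in the loop above
        0                                          // terminator
    };
    return myWglCreateContextAttribsARB(dc, NULL, ctxAttribs);
}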


@@ -86,7 +86,6 @@ protected:
     void MakeModesList();
     void AddMode(int x, int y, int bits, int baseHeight, int refreshHz);
     void FreeModes();
-    bool checkCoreUsability();
 public:
     int GetTrueHeight() { return m_trueHeight; }