Fix displayed color/depth/stencil bits values

The values passed to SDL are just the minimum required. Get the actual
values of colorbits, depthbits, and stencilbits from SDL instead of
assuming that the engine got exactly what it asked for.
This commit is contained in:
Zack Middleton 2018-04-11 01:14:24 -05:00
parent 43602a7eb8
commit 7e9b92de12

View file

@ -451,6 +451,7 @@ static int GLimp_SetMode(int mode, qboolean fullscreen, qboolean noborder, qbool
for (i = 0; i < 16; i++)
{
int testColorBits, testDepthBits, testStencilBits;
int realColorBits[3];
// 0 - default
// 1 - minus colorBits
@ -664,9 +665,13 @@ static int GLimp_SetMode(int mode, qboolean fullscreen, qboolean noborder, qbool
ri.Printf( PRINT_DEVELOPER, "SDL_GL_SetSwapInterval failed: %s\n", SDL_GetError( ) );
}
glConfig.colorBits = testColorBits;
glConfig.depthBits = testDepthBits;
glConfig.stencilBits = testStencilBits;
SDL_GL_GetAttribute( SDL_GL_RED_SIZE, &realColorBits[0] );
SDL_GL_GetAttribute( SDL_GL_GREEN_SIZE, &realColorBits[1] );
SDL_GL_GetAttribute( SDL_GL_BLUE_SIZE, &realColorBits[2] );
SDL_GL_GetAttribute( SDL_GL_DEPTH_SIZE, &glConfig.depthBits );
SDL_GL_GetAttribute( SDL_GL_STENCIL_SIZE, &glConfig.stencilBits );
glConfig.colorBits = realColorBits[0] + realColorBits[1] + realColorBits[2];
ri.Printf( PRINT_ALL, "Using %d color bits, %d depth, %d stencil display.\n",
glConfig.colorBits, glConfig.depthBits, glConfig.stencilBits );