Optimized performance by fixing several issues with GPU<->CPU syncing

Robert Beckebans 2022-03-23 20:22:05 +01:00
parent d4f1d99bd7
commit 62429d8f46
12 changed files with 144 additions and 138 deletions
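The diff shown below only touches the GL info printing path, but the commit title refers to GPU<->CPU sync stalls. As background, here is a minimal sketch of the usual way such stalls are avoided in a GL renderer: keep one fence per in-flight frame and block only when the CPU is about to reuse that frame's resources. This is an illustration, not code from this commit; NUM_FRAME_DATA, BeginFrame and EndFrame are hypothetical names, and a valid OpenGL 3.2+ context with an extension loader such as GLEW is assumed.

#include <GL/glew.h>

// Illustrative constant: how many frames of GPU work may be in flight at once.
static const int NUM_FRAME_DATA = 2;
static GLsync    frameFence[NUM_FRAME_DATA] = { 0, 0 };

// Call before writing this frame's dynamic buffers.
void BeginFrame( int frameIndex )
{
	const int slot = frameIndex % NUM_FRAME_DATA;

	// Only block if the GPU might still be reading this slot's resources.
	if( frameFence[slot] != 0 )
	{
		GLenum result = GL_TIMEOUT_EXPIRED;
		while( result == GL_TIMEOUT_EXPIRED )
		{
			// Wait in 1 ms slices until the fence is signaled.
			result = glClientWaitSync( frameFence[slot], GL_SYNC_FLUSH_COMMANDS_BIT, 1000 * 1000 );
		}
		glDeleteSync( frameFence[slot] );
		frameFence[slot] = 0;
	}
}

// Call after submitting this frame's GPU commands.
void EndFrame( int frameIndex )
{
	const int slot = frameIndex % NUM_FRAME_DATA;

	// The fence is checked the next time this slot comes around, not immediately.
	frameFence[slot] = glFenceSync( GL_SYNC_GPU_COMMANDS_COMPLETE, 0 );
}

Compared with a glFinish() at the end of every frame, this lets the CPU run a frame ahead and only stalls when the GPU has genuinely fallen a full frame behind.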


@@ -1510,6 +1510,7 @@ void GfxInfo_f( const idCmdArgs& args )
common->Printf( "GL_RENDERER: %s\n", glConfig.renderer_string );
common->Printf( "GL_VERSION: %s\n", glConfig.version_string );
common->Printf( "GL_EXTENSIONS: %s\n", glConfig.extensions_string );
#if !defined( USE_NVRHI )
if( glConfig.wgl_extensions_string )
{
common->Printf( "WGL_EXTENSIONS: %s\n", glConfig.wgl_extensions_string );
@@ -1517,12 +1518,13 @@ void GfxInfo_f( const idCmdArgs& args )
 common->Printf( "GL_MAX_TEXTURE_SIZE: %d\n", glConfig.maxTextureSize );
 common->Printf( "GL_MAX_TEXTURE_COORDS_ARB: %d\n", glConfig.maxTextureCoords );
 common->Printf( "GL_MAX_TEXTURE_IMAGE_UNITS_ARB: %d\n", glConfig.maxTextureImageUnits );
+#endif
 // print all the display adapters, monitors, and video modes
 //void DumpAllDisplayDevices();
 //DumpAllDisplayDevices();
-common->Printf( "\nPIXELFORMAT: color(%d-bits) Z(%d-bit) stencil(%d-bits)\n", glConfig.colorBits, glConfig.depthBits, glConfig.stencilBits );
+//common->Printf( "\nPIXELFORMAT: color(%d-bits) Z(%d-bit) stencil(%d-bits)\n", glConfig.colorBits, glConfig.depthBits, glConfig.stencilBits );
 common->Printf( "MODE: %d, %d x %d %s hz:", r_vidMode.GetInteger(), renderSystem->GetWidth(), renderSystem->GetHeight(), fsstrings[r_fullscreen.GetBool()] );
 if( glConfig.displayFrequency )
 {