ImGui integration: Make sure scaling factor is > 0, don't use X11 directly
Fixes #632. The bug was most probably not caused by D3_SDL_X11 but by GetDefaultDPI() returning -1.0; GetDefaultScale() then divided that by 96 and rounded it to 0.0, which is not a usable scaling factor. I decided to drop the D3_SDL_X11 special case anyway.
parent d94bb42d50
commit b8e701b5e1
2 changed files with 7 additions and 63 deletions
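As a standalone illustration of the arithmetic described in the commit message (a minimal sketch; the function names here are made up, only the divide-by-96, the round-to-.0/.5 step and the new <= 0 guard come from the diff below):

#include <cmath>
#include <cstdio>

// Old behaviour: a failed DPI query reported as -1.0 ends up as a scale of 0
// after dividing by the 96 DPI reference and rounding to .0/.5 steps.
static float ScaleFromDpi( float dpi )
{
	float ret = dpi / 96.0f;                 // -1.0 / 96.0 -> about -0.01
	return std::round( ret * 2.0f ) * 0.5f;  // rounds to -0.0, unusable as a scale
}

// New behaviour: anything <= 0 falls back to 1.0 (no scaling), like the guard
// added to GetDefaultScale() in this commit.
static float ScaleFromDpiGuarded( float dpi )
{
	float ret = dpi / 96.0f;
	if ( ret <= 0.0f ) {
		return 1.0f;
	}
	return std::round( ret * 2.0f ) * 0.5f;
}

int main()
{
	std::printf( "unguarded, DPI query failed: %g\n", ScaleFromDpi( -1.0f ) );         // -0
	std::printf( "guarded,   DPI query failed: %g\n", ScaleFromDpiGuarded( -1.0f ) );  // 1
	std::printf( "guarded,   144 DPI display:  %g\n", ScaleFromDpiGuarded( 144.0f ) ); // 1.5
	return 0;
}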
CMakeLists.txt
@@ -285,19 +285,6 @@ if(NOT WIN32)
 	else()
 		message(WARNING "libbacktrace wasn't found. It's not required but recommended, because it provides useful backtraces if dhewm3 crashes")
 	endif()
-
-	# check if our SDL2 supports X11 in SDL_syswm so we can use it for DPI scaling ImGui
-	if(SDL2) # TODO: SDL3? Or just kick this feature?
-		set(CMAKE_REQUIRED_LIBRARIES SDL2)
-		check_c_source_compiles( "#include <SDL_syswm.h>
-			int main() { SDL_SysWMinfo wmInfo = {}; wmInfo.info.x11.display = NULL; return 0; }" HAVE_SDL_X11)
-		unset(CMAKE_REQUIRED_LIBRARIES)
-
-		if(HAVE_SDL_X11)
-			message(STATUS "This SDL2 has X11 support")
-			add_definitions(-DD3_SDL_X11)
-		endif()
-	endif()
 endif() # NOT WIN32
 
 # check if this is some kind of clang (Clang, AppleClang, whatever)
sys_imgui.cpp
@@ -17,13 +17,6 @@
 
 #include "sys_imgui.h"
 
-#ifdef D3_SDL_X11
-#include <dlfcn.h>
-#include <SDL_syswm.h>
-//char *XGetDefault(Display* display, const char* program, const char* option)
-typedef char* (*MY_XGETDEFAULTFUN)(Display*, const char*, const char*);
-#endif
-
 #include "../libs/imgui/backends/imgui_impl_opengl2.h"
 
 #if SDL_VERSION_ATLEAST(3, 0, 0)
@@ -170,48 +163,6 @@ void ShowWarningOverlay( const char* text )
 	warningOverlayStartPos = ImGui::GetMousePos();
 }
 
-#if SDL_MAJOR_VERSION == 2 // not used with SDL3
-static float GetDefaultDPI()
-{
-	SDL_Window* win = sdlWindow;
-	float dpi = -1.0f;
-#ifdef D3_SDL_X11
-	SDL_SysWMinfo wmInfo = {};
-	SDL_VERSION(&wmInfo.version)
-	if(SDL_GetWindowWMInfo(win, &wmInfo) && wmInfo.subsystem == SDL_SYSWM_X11) {
-		Display* display = wmInfo.info.x11.display;
-
-		static void* libX11 = NULL;
-		if(libX11 == NULL) {
-			libX11 = dlopen("libX11.so.6", RTLD_LAZY);
-		}
-		if(libX11 == NULL) {
-			libX11 = dlopen("libX11.so", RTLD_LAZY);
-		}
-		if(libX11 != NULL) {
-			MY_XGETDEFAULTFUN my_xgetdefault = (MY_XGETDEFAULTFUN)dlsym(libX11, "XGetDefault");
-			if(my_xgetdefault != NULL) {
-				//char *XGetDefault(Display* display, const char* program, const char* option)
-				const char* dpiStr = my_xgetdefault(display, "Xft", "dpi");
-				printf("XX dpistr = '%s'\n", dpiStr);
-				if(dpiStr != NULL) {
-					dpi = atof(dpiStr);
-				}
-			}
-		}
-	}
-	if (dpi == -1.0f)
-#endif
-	{
-		int winIdx = SDL_GetWindowDisplayIndex( win );
-		if (winIdx >= 0) {
-			SDL_GetDisplayDPI(winIdx, NULL, &dpi, NULL);
-		}
-	}
-	return dpi;
-}
-#endif // SDL2-only
-
 static float GetDefaultScale()
 {
 	if ( glConfig.winWidth != glConfig.vidWidth ) {
@@ -222,9 +173,15 @@ static float GetDefaultScale()
 #if SDL_VERSION_ATLEAST(3, 0, 0)
 	float ret = SDL_GetWindowDisplayScale( sdlWindow );
 #else
+	float dpi = 0.0f;
+	int winIdx = SDL_GetWindowDisplayIndex( win );
+	SDL_GetDisplayDPI((winIdx >= 0) ? winIdx : 0, NULL, &dpi, NULL);
 	// TODO: different reference DPI on mac? also, doesn't work that well on my laptop..
-	float ret = GetDefaultDPI() / 96.0f;
+	float ret = dpi / 96.0f;
 #endif
+	if ( ret <= 0.0f ) {
+		return 1.0f;
+	}
 	ret = round(ret*2.0)*0.5; // round to .0 or .5
 	return ret;
 }