src/video/x11/SDL_x11modes.c
changeset 6558 90f231aa77b9
parent 6553 2f03111a2105
child 6559 1fc5f5116bd0
--- a/src/video/x11/SDL_x11modes.c	Thu Oct 04 02:56:25 2012 -0700
+++ b/src/video/x11/SDL_x11modes.c	Thu Oct 04 13:50:41 2012 -0700
@@ -25,7 +25,17 @@
 #include "SDL_hints.h"
 #include "SDL_x11video.h"
 
-/*#define X11MODES_DEBUG*/
+#define X11MODES_DEBUG
+
+/* I'm becoming more and more convinced that the application should never use XRandR,
+ * and it's the window manager's responsibility to track and manage display modes for
+ * fullscreen windows.  Right now XRandR is completely broken with respect to window
+ * manager behavior on every window manager that I can find.  For example, on Unity 3D
+ * if you show a fullscreen window while the resolution is changing (within ~250 ms)
+ * your window will retain the fullscreen state hint but be decorated and windowed.
+ */
+#define XRANDR_DISABLED_BY_DEFAULT
+
 
 static int
 get_visualinfo(Display * display, int screen, XVisualInfo * vinfo)
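
With XRANDR_DISABLED_BY_DEFAULT defined, the hint check added in the next hunk only uses XRandR when SDL_HINT_VIDEO_X11_XRANDR is explicitly set to a non-zero value, either programmatically or through the environment override the code already allows. A minimal opt-in sketch, not part of this patch, assuming SDL2's standard hint and init API and that the hint is set before the video subsystem initializes:

    #include "SDL.h"

    int main(int argc, char *argv[])
    {
        /* Explicitly re-enable XRandR: with this change, an unset or
           zero-valued hint makes the XRandR check return SDL_FALSE. */
        SDL_SetHint(SDL_HINT_VIDEO_X11_XRANDR, "1");

        if (SDL_Init(SDL_INIT_VIDEO) != 0) {
            return 1;
        }

        /* ... create a window, request fullscreen display modes, etc. ... */

        SDL_Quit();
        return 0;
    }
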
@@ -190,12 +200,21 @@
 
     /* Allow environment override */
     env = SDL_GetHint(SDL_HINT_VIDEO_X11_XRANDR);
+#ifdef XRANDR_DISABLED_BY_DEFAULT
+    if (!env || !SDL_atoi(env)) {
+#ifdef X11MODES_DEBUG
+        printf("XRandR disabled by default due to window manager issues\n");
+#endif
+        return SDL_FALSE;
+    }
+#else
     if (env && !SDL_atoi(env)) {
 #ifdef X11MODES_DEBUG
         printf("XRandR disabled due to hint\n");
 #endif
         return SDL_FALSE;
     }
+#endif /* XRANDR_DISABLED_BY_DEFAULT */
 
     if (!SDL_X11_HAVE_XRANDR) {
 #ifdef X11MODES_DEBUG