Added a hint to determine whether framebuffer texture acceleration should be used, and added default behaviors for the various platforms.
author	Sam Lantinga <slouken@libsdl.org>
Sat, 05 Feb 2011 10:03:12 -0800
changeset 5190	82a48f4d65f6
parent 5189 6f6a9340fb93
child 5191 93052810ceb5
Added a hint to determine whether framebuffer texture acceleration should be used, and added default behaviors for the various platforms.
include/SDL_hints.h
src/video/SDL_video.c
     1.1 --- a/include/SDL_hints.h	Sat Feb 05 10:02:39 2011 -0800
     1.2 +++ b/include/SDL_hints.h	Sat Feb 05 10:03:12 2011 -0800
     1.3 @@ -50,6 +50,22 @@
     1.4  /* *INDENT-ON* */
     1.5  #endif
     1.6  
     1.7 +/**
     1.8 + *  \brief  A variable controlling how 3D acceleration is used to accelerate the SDL 1.2 screen surface. 
     1.9 + *
    1.10 + *  SDL can try to accelerate the SDL 1.2 screen surface by using streaming
    1.11 + *  textures with a 3D rendering engine.  This variable controls whether and
    1.12 + *  how this is done.
    1.13 + *
    1.14 + *  This variable can be set to the following values:
    1.15 + *    "0"       - Disable 3D acceleration
    1.16 + *    "1"       - Enable 3D acceleration, using the default renderer.
    1.17 + *    "X"       - Enable 3D acceleration, using X where X is one of the valid rendering drivers.  (e.g. "direct3d", "opengl", etc.)
    1.18 + *
    1.19 + *  By default SDL tries to make a best guess for each platform whether
    1.20 + *  to use acceleration or not.
    1.21 + */
    1.22 +#define SDL_HINT_FRAMEBUFFER_ACCELERATION   "SDL_FRAMEBUFFER_ACCELERATION"
    1.23  
    1.24  
    1.25  /**
     2.1 --- a/src/video/SDL_video.c	Sat Feb 05 10:02:39 2011 -0800
     2.2 +++ b/src/video/SDL_video.c	Sat Feb 05 10:03:12 2011 -0800
     2.3 @@ -107,6 +107,80 @@
     2.4      int bytes_per_pixel;
     2.5  } SDL_WindowTextureData;
     2.6  
     2.7 +static SDL_bool
     2.8 +ShouldUseTextureFramebuffer()
     2.9 +{
    2.10 +    const char *hint;
    2.11 +
    2.12 +    /* If there's no native framebuffer support then there's no option */
    2.13 +    if (!_this->CreateWindowFramebuffer) {
    2.14 +        return SDL_TRUE;
    2.15 +    }
    2.16 +
    2.17 +    /* See if the user or application wants a specific behavior */
    2.18 +    hint = SDL_GetHint(SDL_HINT_FRAMEBUFFER_ACCELERATION);
    2.19 +    if (hint) {
    2.20 +        if (*hint == '0') {
    2.21 +            return SDL_FALSE;
    2.22 +        } else {
    2.23 +            return SDL_TRUE;
    2.24 +        }
    2.25 +    }
    2.26 +
    2.27 +    /* Each platform has different performance characteristics */
    2.28 +#if defined(__WIN32__)
    2.29 +    /* GDI BitBlt() is way faster than Direct3D dynamic textures right now.
    2.30 +     */
    2.31 +    return SDL_FALSE;
    2.32 +
    2.33 +#elif defined(__MACOSX__)
    2.34 +    /* Mac OS X uses OpenGL as the native fast path */
    2.35 +    return SDL_TRUE;
    2.36 +
    2.37 +#elif defined(__LINUX__)
    2.38 +    /* Properly configured OpenGL drivers are faster than MIT-SHM */
    2.39 +#if SDL_VIDEO_OPENGL
    2.40 +    /* Ugh, find a way to cache this value! */
    2.41 +    {
    2.42 +        SDL_Window *window;
    2.43 +        SDL_GLContext context;
    2.44 +        SDL_bool hasAcceleratedOpenGL = SDL_FALSE;
    2.45 +
    2.46 +        window = SDL_CreateWindow("OpenGL test", -32, -32, 32, 32, SDL_WINDOW_OPENGL);
    2.47 +        if (window) {
    2.48 +            context = SDL_GL_CreateContext(window);
    2.49 +            if (context) {
    2.50 +                const GLubyte *(APIENTRY * glGetStringFunc) (GLenum);
    2.51 +                const char *vendor = NULL;
    2.52 +
    2.53 +                glGetStringFunc = SDL_GL_GetProcAddress("glGetString");
    2.54 +                if (glGetStringFunc) {
    2.55 +                    vendor = (const char *) glGetStringFunc(GL_VENDOR);
    2.56 +                }
    2.57 +                /* Add more vendors here at will... */
    2.58 +                if (vendor &&
    2.59 +                    (SDL_strstr(vendor, "ATI Technologies") ||
    2.60 +                     SDL_strstr(vendor, "NVIDIA"))) {
    2.61 +                    hasAcceleratedOpenGL = SDL_TRUE;
    2.62 +                }
    2.63 +                SDL_GL_DeleteContext(context);
    2.64 +            }
    2.65 +            SDL_DestroyWindow(window);
    2.66 +        }
    2.67 +        return hasAcceleratedOpenGL;
    2.68 +    }
    2.69 +#else
    2.70 +    return SDL_FALSE;
    2.71 +#endif
    2.72 +
    2.73 +#else
    2.74 +    /* Play it safe, assume that if there is a framebuffer driver that it's
    2.75 +       optimized for the current platform.
    2.76 +    */
    2.77 +    return SDL_FALSE;
    2.78 +#endif
    2.79 +}
    2.80 +
    2.81  static int
    2.82  SDL_CreateWindowTexture(_THIS, SDL_Window * window, Uint32 * format, void ** pixels, int *pitch)
    2.83  {
    2.84 @@ -401,8 +475,8 @@
    2.85          return (-1);
    2.86      }
    2.87  
    2.88 -    /* Add the renderer framebuffer emulation if needed */
    2.89 -    if (!_this->CreateWindowFramebuffer) {
    2.90 +    /* Add the renderer framebuffer emulation if desired */
    2.91 +    if (ShouldUseTextureFramebuffer()) {
    2.92          _this->CreateWindowFramebuffer = SDL_CreateWindowTexture;
    2.93          _this->UpdateWindowFramebuffer = SDL_UpdateWindowTexture;
    2.94          _this->DestroyWindowFramebuffer = SDL_DestroyWindowTexture;