# HG changeset patch # User Sam Lantinga # Date 1295482007 28800 # Node ID da347bfed2402836977e606d3e8b5f30524c6a1d # Parent 8c39b82dc7b01cf80d43bad0f3e68b5950555fe1 Florian Forster to sdl in SDL 1.3 (revision 5508 from SVN), the method used to calculate the bits per pixel from a “int format” differs between “SDL_ListModes” (which always uses the “SDL_BITSPERPIXEL” macro) and “SDL_PixelFormatEnumToMasks” (which uses either “SDL_BITSPERPIXEL” or “SDL_BYTESPERPIXEL * 8”, depending on the value of “SDL_BYTESPERPIXEL”). Because the values are later compared in “SDL_ListModes” this may lead to some valid video modes not being returned. In my case the only mode returned by “SDL_GetNumDisplayModes” was dismissed and NULL was returned. (This led to the calling application sticking its head in the sand.) The attached patch copies the method used within “SDL_PixelFormatEnumToMasks” to “SDL_ListModes”. This solved the problem for me though I don't fully understand the method used by “SDL_PixelFormatEnumToMasks”. diff -r 8c39b82dc7b0 -r da347bfed240 src/SDL_compat.c --- a/src/SDL_compat.c Wed Jan 19 16:02:15 2011 -0800 +++ b/src/SDL_compat.c Wed Jan 19 16:06:47 2011 -0800 @@ -155,11 +155,21 @@ modes = NULL; for (i = 0; i < SDL_GetNumDisplayModes(); ++i) { SDL_DisplayMode mode; + int bpp; + SDL_GetDisplayMode(i, &mode); if (!mode.w || !mode.h) { return (SDL_Rect **) (-1); } - if (SDL_BITSPERPIXEL(mode.format) != format->BitsPerPixel) { + + /* Copied from src/video/SDL_pixels.c:SDL_PixelFormatEnumToMasks */ + if (SDL_BYTESPERPIXEL(mode.format) <= 2) { + bpp = SDL_BITSPERPIXEL(mode.format); + } else { + bpp = SDL_BYTESPERPIXEL(mode.format) * 8; + } + + if (bpp != format->BitsPerPixel) { continue; } if (nmodes > 0 && modes[nmodes - 1]->w == mode.w