sdl-ios-xcode: test/testtimer.c @ 3539:f2846bf19360
Fixed bug #896
John Popplewell 2009-12-08 23:05:50 PST
Originally reported by AKFoerster on the mailing list.
The error occurs when decoding UTF-8 Russian text to UTF-16LE on Windows, specifically
on platforms without iconv support (the default on Windows).
Valid UTF-8 characters are flagged as overlong sequences and then substituted with the
UNKNOWN_UNICODE character.
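For reference, the failure can be reproduced directly through SDL_iconv_string(). The
following is a minimal sketch (my own illustration, not part of the original report;
the sample character is arbitrary), assuming SDL's built-in iconv implementation is in
use:

    /* Convert one Cyrillic character from UTF-8 to UTF-16LE and inspect
       the result.  With the mask bug described below, the output is
       UNKNOWN_UNICODE (U+FFFD) instead of U+0416. */
    #include <stdio.h>
    #include "SDL.h"

    int main(int argc, char *argv[])
    {
        const char *utf8 = "\xD0\x96";  /* CYRILLIC CAPITAL LETTER ZHE (U+0416) */
        char *utf16 = SDL_iconv_string("UTF-16LE", "UTF-8",
                                       utf8, SDL_strlen(utf8) + 1);
        if (utf16) {
            Uint16 ch = (Uint8) utf16[0] | ((Uint8) utf16[1] << 8);
            printf("Decoded to U+%04X (expected U+0416)\n", ch);
            SDL_free(utf16);
        }
        return 0;
    }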
After studying the testiconv.c example program, reading the RFCs, and putting some
printf statements in SDL_iconv.c, I found the problem in a test for 'Maximum
overlong sequences', specifically case 4.2.1, which is carried out by the following
code:
    } else if ( p[0] >= 0xC0 ) {
        if ( (p[0] & 0xE0) != 0xC0 ) {
            /* Skip illegal sequences
            return SDL_ICONV_EILSEQ;
            */
            ch = UNKNOWN_UNICODE;
        } else {
            if ( (p[0] & 0xCE) == 0xC0 ) {    <<<<<<<< here
                overlong = SDL_TRUE;
            }
            ch = (Uint32)(p[0] & 0x1F);
            left = 1;
        }
    } else {
Here is the 2-byte encoding of a character in the range 00000080 - 000007FF:

    110xxxxx 10xxxxxx

The line in question is supposed to be checking for an overlong sequence, i.e. one
whose value is at most

    11000001 10111111

and should therefore have been represented as a single byte.
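To make the overlong condition concrete: a 2-byte sequence 110xxxxx 10yyyyyy decodes
to (x << 6) | y, and it is overlong exactly when that value is below 0x80, which can
only happen when the lead byte is 0xC0 or 0xC1. A small self-contained sketch of that
arithmetic (illustrative only, not from the report):

    #include <stdio.h>

    /* Decode a 2-byte UTF-8 sequence: 110xxxxx 10yyyyyy -> (x << 6) | y. */
    static unsigned decode2(unsigned char lead, unsigned char cont)
    {
        return ((unsigned) (lead & 0x1F) << 6) | (unsigned) (cont & 0x3F);
    }

    int main(void)
    {
        /* 0xC1 0xBF decodes to 0x7F, which fits in one byte: overlong. */
        printf("C1 BF -> U+%04X overlong=%d\n",
               decode2(0xC1, 0xBF), decode2(0xC1, 0xBF) < 0x80);
        /* 0xD0 0x96 decodes to U+0416 (Cyrillic Zhe): perfectly legal. */
        printf("D0 96 -> U+%04X overlong=%d\n",
               decode2(0xD0, 0x96), decode2(0xD0, 0x96) < 0x80);
        return 0;
    }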
BUT the mask value (0xCE) is wrong: it isn't checking the top-most bit of the five
payload bits:

    11000001  value
    11001110  mask (incorrect)
       ^

and should be (0xDE):

    11000001  value
    11011110  mask (correct)

making the above code:
    } else if ( p[0] >= 0xC0 ) {
        if ( (p[0] & 0xE0) != 0xC0 ) {
            /* Skip illegal sequences
            return SDL_ICONV_EILSEQ;
            */
            ch = UNKNOWN_UNICODE;
        } else {
            if ( (p[0] & 0xDE) == 0xC0 ) {    <<<<<<<< here
                overlong = SDL_TRUE;
            }
            ch = (Uint32)(p[0] & 0x1F);
            left = 1;
        }
    } else {
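A quick way to see the practical difference between the two masks is to enumerate
every 2-byte lead byte (0xC0-0xDF) and compare each mask's verdict against the ground
truth that only 0xC0 and 0xC1 can start an overlong 2-byte sequence. A sketch of such
a check (again my own illustration, not part of the report):

    #include <stdio.h>

    int main(void)
    {
        unsigned lead;
        for (lead = 0xC0; lead <= 0xDF; ++lead) {
            int truth    = (lead == 0xC0 || lead == 0xC1); /* truly overlong lead */
            int old_mask = ((lead & 0xCE) == 0xC0);        /* buggy check */
            int new_mask = ((lead & 0xDE) == 0xC0);        /* fixed check */
            if (old_mask != truth || new_mask != truth) {
                printf("lead 0x%02X: truth=%d old=%d new=%d\n",
                       lead, truth, old_mask, new_mask);
            }
        }
        return 0;
    }

The only lines it prints are for leads 0xD0 and 0xD1, which the old mask falsely flags
as overlong; those are exactly the lead bytes for U+0400-U+047F, the Cyrillic range
used by Russian text.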
I can supply a test program and/or a patch if required,
best regards,
John Popplewell
author:    Sam Lantinga <slouken@libsdl.org>
date:      Fri, 11 Dec 2009 08:03:43 +0000
parents:   c121d94672cb
children:  (none)
/* Test program to check the resolution of the SDL timer on the current platform */

#include <stdlib.h>
#include <stdio.h>

#include "SDL.h"

#define DEFAULT_RESOLUTION 1

static int ticks = 0;

static Uint32 SDLCALL
ticktock(Uint32 interval)
{
    ++ticks;
    return (interval);
}

static Uint32 SDLCALL
callback(Uint32 interval, void *param)
{
    printf("Timer %d : param = %d\n", interval, (int) (uintptr_t) param);
    return interval;
}

int
main(int argc, char *argv[])
{
    int desired;
    SDL_TimerID t1, t2, t3;

    if (SDL_Init(SDL_INIT_TIMER) < 0) {
        fprintf(stderr, "Couldn't initialize SDL: %s\n", SDL_GetError());
        return (1);
    }

    /* Start the timer */
    desired = 0;
    if (argv[1]) {
        desired = atoi(argv[1]);
    }
    if (desired == 0) {
        desired = DEFAULT_RESOLUTION;
    }
    SDL_SetTimer(desired, ticktock);

    /* Wait 10 seconds */
    printf("Waiting 10 seconds\n");
    SDL_Delay(10 * 1000);

    /* Stop the timer */
    SDL_SetTimer(0, NULL);

    /* Print the results */
    if (ticks) {
        fprintf(stderr, "Timer resolution: desired = %d ms, actual = %f ms\n",
                desired, (double) (10 * 1000) / ticks);
    }

    /* Test multiple timers */
    printf("Testing multiple timers...\n");
    t1 = SDL_AddTimer(100, callback, (void *) 1);
    if (!t1)
        fprintf(stderr, "Could not create timer 1: %s\n", SDL_GetError());
    t2 = SDL_AddTimer(50, callback, (void *) 2);
    if (!t2)
        fprintf(stderr, "Could not create timer 2: %s\n", SDL_GetError());
    t3 = SDL_AddTimer(233, callback, (void *) 3);
    if (!t3)
        fprintf(stderr, "Could not create timer 3: %s\n", SDL_GetError());

    /* Wait 10 seconds */
    printf("Waiting 10 seconds\n");
    SDL_Delay(10 * 1000);

    printf("Removing timer 1 and waiting 5 more seconds\n");
    SDL_RemoveTimer(t1);
    SDL_Delay(5 * 1000);

    SDL_RemoveTimer(t2);
    SDL_RemoveTimer(t3);

    SDL_Quit();
    return (0);
}