sdl-ios-xcode: view test/testerror.c @ 3539:f2846bf19360
Fixed bug #896
John Popplewell 2009-12-08 23:05:50 PST
Originally reported by AKFoerster on the mailing list.
UTF-8 Russian text decodes incorrectly to UTF-16LE on platforms without iconv
support (the default on Windows). Valid UTF-8 sequences are flagged as overlong
and then substituted with the UNKNOWN_UNICODE replacement character.
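For reference, a minimal sketch of a reproduction using SDL's built-in
converter (assuming SDL was built without iconv, so the internal SDL_iconv.c
fallback is exercised; the Cyrillic sample string is arbitrary):

    #include "SDL.h"

    int main(int argc, char *argv[])
    {
        /* "Привет" in UTF-8; every character has lead byte 0xD0 or 0xD1 */
        const char *utf8 = "\xD0\x9F\xD1\x80\xD0\xB8\xD0\xB2\xD0\xB5\xD1\x82";
        char *utf16le = SDL_iconv_string("UTF-16LE", "UTF-8",
                                         utf8, SDL_strlen(utf8) + 1);
        if (utf16le) {
            /* With the bug, each character comes back as UNKNOWN_UNICODE
               (0xFFFD) instead of its real code point. */
            SDL_free(utf16le);
        }
        return 0;
    }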
After studying the testiconv.c example program, reading the RFCs, and putting
some printf statements in SDL_iconv.c, I traced the problem to a check for
'Maximum overlong sequences' (specifically test 4.2.1), which is carried out
by the following code:
    } else if ( p[0] >= 0xC0 ) {
        if ( (p[0] & 0xE0) != 0xC0 ) {
            /* Skip illegal sequences
            return SDL_ICONV_EILSEQ;
            */
            ch = UNKNOWN_UNICODE;
        } else {
            if ( (p[0] & 0xCE) == 0xC0 ) {        <<<<<<<< here
                overlong = SDL_TRUE;
            }
            ch = (Uint32)(p[0] & 0x1F);
            left = 1;
        }
    } else {
Here is the 2-byte encoding of a character in the range U+0080 - U+07FF:

    110xxxxx 10xxxxxx

The line in question is supposed to be checking for an overlong sequence,
which would be at most

    11000001 10111111

i.e. a value that should have been represented as a single byte.
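For illustration, the (perfectly valid) Cyrillic letter 'а' (U+0430, binary
10000 110000) encodes as:

    110 10000  10 110000  ->  0xD0 0xB0

Note its lead byte, 0xD0.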
BUT the mask value (0xCE) is wrong: it isn't checking the top-most payload bit:

    11000001  value
    11001110  mask (incorrect)
       ^

and should be (0xDE):

    11000001  value
    11011110  mask (correct)
       ^

Because the bad mask ignores that bit, the valid lead bytes 0xD0 and 0xD1
(which cover U+0400 - U+047F, i.e. most Cyrillic text) also satisfy the check
and are wrongly flagged as overlong.
With the corrected mask, the code becomes:
    } else if ( p[0] >= 0xC0 ) {
        if ( (p[0] & 0xE0) != 0xC0 ) {
            /* Skip illegal sequences
            return SDL_ICONV_EILSEQ;
            */
            ch = UNKNOWN_UNICODE;
        } else {
            if ( (p[0] & 0xDE) == 0xC0 ) {        <<<<<<<< here
                overlong = SDL_TRUE;
            }
            ch = (Uint32)(p[0] & 0x1F);
            left = 1;
        }
    } else {
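As a quick sanity check, a small standalone program can sweep every 2-byte
lead byte under both masks; the buggy mask misfires on exactly the Cyrillic
lead bytes:

    #include <stdio.h>

    int main(void)
    {
        unsigned b;
        /* Compare the buggy (0xCE) and fixed (0xDE) overlong checks
           across every 2-byte UTF-8 lead byte. */
        for (b = 0xC0; b <= 0xDF; ++b) {
            int buggy = ((b & 0xCE) == 0xC0);
            int fixed = ((b & 0xDE) == 0xC0);
            if (buggy != fixed) {
                printf("lead byte 0x%02X wrongly flagged as overlong\n", b);
            }
        }
        /* Prints 0xD0 and 0xD1 -- the lead bytes of U+0400..U+047F,
           which cover most Cyrillic text. */
        return 0;
    }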
I can supply a test program and/or a patch if required,
best regards,
John Popplewell
author | Sam Lantinga <slouken@libsdl.org>
---|---
date | Fri, 11 Dec 2009 08:03:43 +0000
parents | c121d94672cb
children | 0d1b16ee0bca
/* Simple test of the SDL threading code and error handling */

#include <stdio.h>
#include <stdlib.h>
#include <signal.h>

#include "SDL.h"
#include "SDL_thread.h"

static int alive = 0;

/* Call this instead of exit(), so we can clean up SDL: atexit() is evil. */
static void
quit(int rc)
{
    SDL_Quit();
    exit(rc);
}

int SDLCALL
ThreadFunc(void *data)
{
    /* Set the child thread error string */
    SDL_SetError("Thread %s (%d) had a problem: %s",
                 (char *) data, SDL_ThreadID(), "nevermind");
    while (alive) {
        printf("Thread '%s' is alive!\n", (char *) data);
        SDL_Delay(1 * 1000);
    }
    printf("Child thread error string: %s\n", SDL_GetError());
    return (0);
}

int
main(int argc, char *argv[])
{
    SDL_Thread *thread;

    /* Load the SDL library */
    if (SDL_Init(0) < 0) {
        fprintf(stderr, "Couldn't initialize SDL: %s\n", SDL_GetError());
        return (1);
    }

    /* Set the error value for the main thread */
    SDL_SetError("No worries");

    alive = 1;
    thread = SDL_CreateThread(ThreadFunc, "#1");
    if (thread == NULL) {
        fprintf(stderr, "Couldn't create thread: %s\n", SDL_GetError());
        quit(1);
    }
    SDL_Delay(5 * 1000);

    printf("Waiting for thread #1\n");
    alive = 0;
    SDL_WaitThread(thread, NULL);

    printf("Main thread error string: %s\n", SDL_GetError());

    SDL_Quit();
    return (0);
}
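When run, the child thread prints its liveness message for roughly five
seconds, and each thread then reports its own error string: SDL_SetError()
and SDL_GetError() keep per-thread state, which is exactly what this test
exercises.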