make kernel UTFmax and Runemax consistent with libc (21-bit runes) (thanks maurice)
This commit is contained in:
parent
3e52ada202
commit
0276031c01
1 changed files with 4 additions and 4 deletions
|
@@ -35,11 +35,11 @@ extern int cistrncmp(char*, char*, int);

 enum
 {
-	UTFmax		= 3,	/* maximum bytes per rune */
-	Runesync	= 0x80,	/* cannot represent part of a UTF sequence */
-	Runeself	= 0x80,	/* rune and UTF sequences are the same (<) */
+	UTFmax		= 4,	/* maximum bytes per rune */
+	Runesync	= 0x80,	/* cannot represent part of a UTF sequence */
+	Runeself	= 0x80,	/* rune and UTF sequences are the same (<) */
 	Runeerror	= 0xFFFD,	/* decoding error in UTF */
-	Runemax		= 0xFFFF,	/* 16 bit rune */
+	Runemax		= 0x10FFFF,	/* 21 bit rune */
 };

 /*
|
Loading…
Reference in a new issue