Use int32_t instead of plain int with Unicode code points

On some architectures, int just isn't big enough to hold all Unicode
code points.
Author: Petri Lehtinen
Date:   2009-12-02 23:48:50 +02:00
parent  e0a88d19d1
commit  d67aeb9739
3 changed files with 10 additions and 6 deletions
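The rationale in the commit message follows directly from the C standard: plain int is only guaranteed to be at least 16 bits wide (INT_MAX >= 32767), while the highest Unicode code point, U+10FFFF (1114111), needs 21 bits. A minimal sketch, not part of the commit, that makes the limit concrete:

#include <limits.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    int32_t max_codepoint = 0x10FFFF;  /* highest valid Unicode code point */

    /* On a platform with 16-bit int, INT_MAX is 32767 and U+10FFFF
     * does not fit; int32_t is guaranteed to hold it everywhere. */
    if((long)INT_MAX < (long)max_codepoint)
        printf("plain int cannot hold U+10FFFF on this platform\n");
    else
        printf("int happens to be wide enough here, but the standard does not promise it\n");
    return 0;
}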

@@ -6,8 +6,9 @@
  */
 
 #include <string.h>
+#include <stdint.h>
 
-int utf8_encode(int codepoint, char *buffer, int *size)
+int utf8_encode(int32_t codepoint, char *buffer, int *size)
 {
     if(codepoint < 0)
         return -1;
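For context, a hedged sketch of calling the widened function. The out-parameter convention (UTF-8 bytes written to buffer, byte count stored through size, zero on success) is an assumption read off the surrounding code, not something this hunk states:

#include <stdint.h>

int utf8_encode(int32_t codepoint, char *buffer, int *size);  /* declared in this library */

void example(void)
{
    char buffer[4];
    int size;

    /* U+10348 lies outside the BMP; its value (66376) already exceeds
     * the range of a minimal 16-bit int, so the int32_t parameter matters. */
    if(utf8_encode(0x10348, buffer, &size) == 0) {
        /* assumed: buffer now holds the 4-byte UTF-8 sequence, size == 4 */
    }
}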
@@ -81,7 +82,8 @@ int utf8_check_first(char byte)
 
 int utf8_check_full(const char *buffer, int size)
 {
-    int i, value = 0;
+    int i;
+    int32_t value = 0;
     unsigned char u = (unsigned char)buffer[0];
 
     if(size == 2)
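The second hunk widens value for the same reason: utf8_check_full accumulates the decoded code point from continuation bytes, and a 4-byte sequence decodes to values up to U+10FFFF. A simplified sketch of that accumulation, assuming the usual UTF-8 decoding shape rather than the exact code in this file:

#include <stdint.h>

/* Simplified sketch, not the code from this commit: decode a 4-byte
 * UTF-8 sequence. Each continuation byte contributes 6 bits, so the
 * result can reach 0x10FFFF, which needs 21 bits and thus int32_t. */
static int32_t decode4(const unsigned char *buf)
{
    int i;
    int32_t value = buf[0] & 0x07;       /* payload bits of a 11110xxx lead byte */
    for(i = 1; i < 4; i++)
        value = (value << 6) | (buf[i] & 0x3F);
    return value;                        /* up to 0x10FFFF */
}
/* decode4((const unsigned char *)"\xF4\x8F\xBF\xBF") == 0x10FFFF */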