On some architectures, int just isn't big enough to hold all Unicode
code points: C only guarantees that int is at least 16 bits wide, while
code points run up to U+10FFFF. Use the fixed-width int32_t for code
point values instead.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#include <stdint.h>
#include <jansson.h>
#include "jansson_private.h"
#include <string.h>
#include <stdarg.h>
#include <assert.h>
+#include <stdint.h>
#include <jansson.h>
#include "jansson_private.h"
/* assumes that str points to 'u' plus at least 4 valid hex digits */
-static int decode_unicode_escape(const char *str)
+static int32_t decode_unicode_escape(const char *str)
{
    int i;
-   int value = 0;
+   int32_t value = 0;

    assert(str[0] == 'u');
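The hex-decoding loop itself is unchanged and not part of the hunk. For
reference, here is a minimal standalone sketch of what such a decoder
does (decode_hex4 is a hypothetical stand-in, not jansson's exact code);
four hex digits can reach 0xFFFF, which already overflows a 16-bit
signed int, hence the int32_t return type:

#include <assert.h>
#include <stdint.h>

/* Sketch only: fold the four hex digits after 'u' into one value. */
static int32_t decode_hex4(const char *str)
{
    int32_t value = 0;
    int i;

    assert(str[0] == 'u');
    for(i = 1; i <= 4; i++) {
        char c = str[i];
        value <<= 4;                   /* shift in the next nibble */
        if(c >= '0' && c <= '9')
            value += c - '0';
        else if(c >= 'a' && c <= 'f')
            value += c - 'a' + 10;
        else if(c >= 'A' && c <= 'F')
            value += c - 'A' + 10;
        else
            assert(0);                 /* caller guarantees valid hex */
    }
    return value;                      /* at most 0xFFFF */
}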
if(*p == 'u') {
    char buffer[4];
    int length;
-   int value;
+   int32_t value;

    value = decode_unicode_escape(p);
    p += 5;

    if(0xD800 <= value && value <= 0xDBFF) {
        /* surrogate pair */
        if(*p == '\\' && *(p + 1) == 'u') {
-           int value2 = decode_unicode_escape(++p);
+           int32_t value2 = decode_unicode_escape(++p);
            p += 5;

            if(0xDC00 <= value2 && value2 <= 0xDFFF) {
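The hunk cuts off just before the combination step. Per the UTF-16
rules, a high surrogate (0xD800-0xDBFF) followed by a low surrogate
(0xDC00-0xDFFF) encodes one supplementary-plane code point, combined
with the standard formula:

/* Standard UTF-16 surrogate-pair combination; the result ranges from
 * 0x10000 to 0x10FFFF, which is exactly why value must be 32 bits. */
value = 0x10000 + ((value - 0xD800) << 10) + (value2 - 0xDC00);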
#include <string.h>
+#include <stdint.h>

-int utf8_encode(int codepoint, char *buffer, int *size)
+int utf8_encode(int32_t codepoint, char *buffer, int *size)
{
    if(codepoint < 0)
        return -1;
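The rest of utf8_encode is unchanged and not shown; it selects the
sequence length from the code point's magnitude. A hedged sketch of
that standard UTF-8 size logic (utf8_length is a hypothetical helper,
not the function's actual body):

#include <stdint.h>

/* Sketch: byte count of a code point in UTF-8. The upper bound,
 * 0x10FFFF, is the reason a possibly 16-bit int cannot hold it. */
static int utf8_length(int32_t codepoint)
{
    if(codepoint < 0)
        return -1;             /* invalid input */
    if(codepoint < 0x80)
        return 1;              /* 0xxxxxxx */
    if(codepoint < 0x800)
        return 2;              /* 110xxxxx 10xxxxxx */
    if(codepoint < 0x10000)
        return 3;              /* 1110xxxx 10xxxxxx 10xxxxxx */
    if(codepoint <= 0x10FFFF)
        return 4;              /* 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx */
    return -1;                 /* beyond the Unicode range */
}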
int utf8_check_full(const char *buffer, int size)
{
-   int i, value = 0;
+   int i;
+   int32_t value = 0;

    unsigned char u = (unsigned char)buffer[0];

    if(size == 2)
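Beyond this hunk, utf8_check_full decodes the sequence back into value
to reject overlong and out-of-range encodings, so the decoded value can
again reach 0x10FFFF. A rough standalone equivalent of that kind of
validation, assuming only the standard UTF-8 rules (a sketch, not the
function's exact body):

#include <stdint.h>

/* Sketch: decode an already length-classified UTF-8 sequence and
 * reject overlong, surrogate, and out-of-range results. */
static int32_t utf8_decode_checked(const unsigned char *buf, int size)
{
    static const int32_t min_value[5] = { 0, 0, 0x80, 0x800, 0x10000 };
    int32_t value;
    int i;

    if(size < 2 || size > 4)
        return -1;
    value = buf[0] & (0x7F >> size);       /* payload bits of the lead byte */
    for(i = 1; i < size; i++) {
        if((buf[i] & 0xC0) != 0x80)        /* continuation bytes are 10xxxxxx */
            return -1;
        value = (value << 6) | (buf[i] & 0x3F);
    }
    if(value < min_value[size])            /* overlong encoding */
        return -1;
    if(value >= 0xD800 && value <= 0xDFFF) /* UTF-16 surrogate range */
        return -1;
    if(value > 0x10FFFF)                   /* beyond Unicode */
        return -1;
    return value;
}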