
json_tokener.c 38 kB

json_tokener_parse_ex: handle out of memory errors

Do not silently truncate values or skip entries if out of memory errors occur.

Proof of Concept:

- Create poc.c, a program which builds an eight megabyte JSON object with key "A" and a long run of "B"s as the value, the last of them written as a \u escape sequence:

```c
#include <err.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "json.h"

#define STR_LEN (8 * 1024 * 1024)
#define STR_PREFIX "{ \"A\": \""
#define STR_SUFFIX "\\u0042\" }"

int main(void)
{
    char *str;
    struct json_tokener *tok;
    struct json_object *obj;

    if ((tok = json_tokener_new()) == NULL)
        errx(1, "json_tokener_new");

    if ((str = malloc(STR_LEN)) == NULL)
        err(1, "malloc");

    /* Fill with 'B', then overwrite the start and end with the JSON
     * prefix and the escaped suffix; the suffix copy includes the
     * terminating NUL. */
    memset(str, 'B', STR_LEN);
    memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1);
    memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX));

    obj = json_tokener_parse(str);
    free(str);

    printf("%p\n", obj);
    if (obj != NULL) {
        printf("%.*s\n", 50, json_object_to_json_string(obj));
        json_object_put(obj);
    }

    json_tokener_free(tok);
    return 0;
}
```

- Compile and run poc, assuming you have enough free heap space:

```
gcc $(pkg-config --cflags --libs json-c) -o poc poc.c
./poc
0x559421e15de0
{ "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
```

- Reduce the available heap and run again, which leads to silent truncation:

```
ulimit -d 10000
./poc
0x555a5b453de0
{ "A": "B" }
```

- Compile json-c with this change and run with the reduced heap again:

```
ulimit -d 10000
./poc
(nil)
```

The printed JSON is limited to 50 characters by the %.*s format, i.e. json-c parses the 8 MB string correctly in the first run but the poc does not print all of it to the screen.

The truncation in the second run occurs because the parser tries to append all characters up to the \u-escaped 'B' to its buffer in a single operation. Since memory is limited to 10 MB, this allocation fails, yet the parser does not report an error and continues normally.

Another way to reproduce this is to create a JSON file close to 2 GB and run a program on a system with a limited amount of RAM, i.e. around 3 GB, but ulimit restrictions are much easier for a proof of concept.

Treat memory errors correctly and abort the operation when they occur.
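For context (this is not part of the commit message above): once the tokener reports allocation failures as errors, applications can check for them explicitly. The sketch below shows one way to do that with json-c's documented API (json_tokener_parse_ex, json_tokener_get_error, json_tokener_error_desc); parse_or_fail is a hypothetical helper name, and the header path may differ depending on how json-c is installed.

```c
#include <stdio.h>
#include <string.h>
#include <json-c/json.h> /* assumption: installed headers; inside the tree use "json.h" */

/* Parse a buffer and report tokener errors instead of silently
 * accepting a truncated or partial object. */
static struct json_object *parse_or_fail(const char *buf, size_t len)
{
    struct json_tokener *tok;
    struct json_object *obj;
    enum json_tokener_error jerr;

    if ((tok = json_tokener_new()) == NULL)
        return NULL;

    obj = json_tokener_parse_ex(tok, buf, (int)len);
    jerr = json_tokener_get_error(tok);
    json_tokener_free(tok);

    if (jerr != json_tokener_success) {
        /* With the fix described above, an out-of-memory condition
         * surfaces here as a parse error rather than a shortened value. */
        fprintf(stderr, "parse failed: %s\n", json_tokener_error_desc(jerr));
        if (obj != NULL)
            json_object_put(obj);
        return NULL;
    }
    return obj;
}

int main(void)
{
    const char *input = "{ \"A\": \"B\" }";
    struct json_object *obj = parse_or_fail(input, strlen(input));

    if (obj == NULL)
        return 1;
    printf("%s\n", json_object_to_json_string(obj));
    json_object_put(obj);
    return 0;
}
```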
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
  1. /*
  2. * $Id: json_tokener.c,v 1.20 2006/07/25 03:24:50 mclark Exp $
  3. *
  4. * Copyright (c) 2004, 2005 Metaparadigm Pte. Ltd.
  5. * Michael Clark <michael@metaparadigm.com>
  6. *
  7. * This library is free software; you can redistribute it and/or modify
  8. * it under the terms of the MIT license. See COPYING for details.
  9. *
  10. *
  11. * Copyright (c) 2008-2009 Yahoo! Inc. All rights reserved.
  12. * The copyrights to the contents of this file are licensed under the MIT License
  13. * (https://www.opensource.org/licenses/mit-license.php)
  14. */
  15. #include "config.h"
  16. #include "math_compat.h"
  17. #include <assert.h>
  18. #include <errno.h>
  19. #include <limits.h>
  20. #include <math.h>
  21. #include <stddef.h>
  22. #include <stdio.h>
  23. #include <stdlib.h>
  24. #include <string.h>
  25. #include "debug.h"
  26. #include "json_inttypes.h"
  27. #include "json_object.h"
  28. #include "json_object_private.h"
  29. #include "json_tokener.h"
  30. #include "json_util.h"
  31. #include "printbuf.h"
  32. #include "strdup_compat.h"
  33. #ifdef HAVE_LOCALE_H
  34. #include <locale.h>
  35. #endif /* HAVE_LOCALE_H */
  36. #ifdef HAVE_XLOCALE_H
  37. #include <xlocale.h>
  38. #endif
  39. #ifdef HAVE_STRINGS_H
  40. #include <strings.h>
  41. #endif /* HAVE_STRINGS_H */
  42. #define jt_hexdigit(x) (((x) <= '9') ? (x) - '0' : ((x)&7) + 9)
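/* Illustrative examples (valid ASCII hex digits are assumed, which the
 * is_hex_char() checks below guarantee before this macro is used):
 *   jt_hexdigit('9') == 9
 *   jt_hexdigit('b') == jt_hexdigit('B') == 11, since 'b' & 7 == 'B' & 7 == 2
 */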
  43. #if !HAVE_STRNCASECMP && defined(_MSC_VER)
  44. /* MSC has the version as _strnicmp */
  45. #define strncasecmp _strnicmp
  46. #elif !HAVE_STRNCASECMP
  47. #error You do not have strncasecmp on your system.
  48. #endif /* HAVE_STRNCASECMP */
  49. #if defined(_MSC_VER) && (_MSC_VER <= 1800)
  50. /* VS2013 doesn't know about "inline" */
  51. #define inline __inline
  52. #elif defined(AIX_CC)
  53. #define inline
  54. #endif
  55. /* The following helper functions are used to speed up parsing. They
  56. * are faster than their ctype counterparts because they assume that
  57. * the input is in ASCII and that the locale is set to "C". The
  58. * compiler will also inline these functions, providing an additional
  59. * speedup by saving on function calls.
  60. */
  61. static inline int is_ws_char(char c)
  62. {
  63. return c == ' '
  64. || c == '\t'
  65. || c == '\n'
  66. || c == '\r';
  67. }
  68. static inline int is_hex_char(char c)
  69. {
  70. return (c >= '0' && c <= '9')
  71. || (c >= 'A' && c <= 'F')
  72. || (c >= 'a' && c <= 'f');
  73. }
  74. /* Use C99 NAN by default; if not available, nan("") should work too. */
  75. #ifndef NAN
  76. #define NAN nan("")
  77. #endif /* !NAN */
  78. static const char json_null_str[] = "null";
  79. static const int json_null_str_len = sizeof(json_null_str) - 1;
  80. static const char json_inf_str[] = "Infinity";
  81. /* Swapped case "Infinity" to avoid need to call tolower() on input chars: */
  82. static const char json_inf_str_invert[] = "iNFINITY";
  83. static const unsigned int json_inf_str_len = sizeof(json_inf_str) - 1;
  84. static const char json_nan_str[] = "NaN";
  85. static const int json_nan_str_len = sizeof(json_nan_str) - 1;
  86. static const char json_true_str[] = "true";
  87. static const int json_true_str_len = sizeof(json_true_str) - 1;
  88. static const char json_false_str[] = "false";
  89. static const int json_false_str_len = sizeof(json_false_str) - 1;
  90. /* clang-format off */
  91. static const char *json_tokener_errors[] = {
  92. "success",
  93. "continue",
  94. "nesting too deep",
  95. "unexpected end of data",
  96. "unexpected character",
  97. "null expected",
  98. "boolean expected",
  99. "number expected",
  100. "array value separator ',' expected",
  101. "quoted object property name expected",
  102. "object property name separator ':' expected",
  103. "object value separator ',' expected",
  104. "invalid string sequence",
  105. "expected comment",
  106. "invalid utf-8 string",
  107. "buffer size overflow",
  108. "out of memory"
  109. };
  110. /* clang-format on */
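/* Note: json_tokener_error_desc() below indexes this table directly with an
 * enum json_tokener_error value, so the order here is expected to mirror the
 * enum declared in json_tokener.h; e.g. json_tokener_error_memory should map
 * to the final "out of memory" entry. */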
  111. /**
  112. * Validate the UTF-8 string in strict mode.
  113. * If the input is not valid UTF-8, return an error.
  114. */
  115. static json_bool json_tokener_validate_utf8(const char c, unsigned int *nBytes);
  116. static int json_tokener_parse_double(const char *buf, int len, double *retval);
  117. const char *json_tokener_error_desc(enum json_tokener_error jerr)
  118. {
  119. int jerr_int = (int)jerr;
  120. if (jerr_int < 0 ||
  121. jerr_int >= (int)(sizeof(json_tokener_errors) / sizeof(json_tokener_errors[0])))
  122. return "Unknown error, "
  123. "invalid json_tokener_error value passed to json_tokener_error_desc()";
  124. return json_tokener_errors[jerr];
  125. }
  126. enum json_tokener_error json_tokener_get_error(struct json_tokener *tok)
  127. {
  128. return tok->err;
  129. }
  130. /* Stuff for decoding unicode sequences */
  131. #define IS_HIGH_SURROGATE(uc) (((uc)&0xFFFFFC00) == 0xD800)
  132. #define IS_LOW_SURROGATE(uc) (((uc)&0xFFFFFC00) == 0xDC00)
  133. #define DECODE_SURROGATE_PAIR(hi, lo) ((((hi)&0x3FF) << 10) + ((lo)&0x3FF) + 0x10000)
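/* Worked example: for the escaped pair \uD83D\uDE00, IS_HIGH_SURROGATE(0xD83D)
 * and IS_LOW_SURROGATE(0xDE00) both hold, and
 * DECODE_SURROGATE_PAIR(0xD83D, 0xDE00) == 0x1F600, which the unicode escape
 * handling below then emits as the 4-byte UTF-8 sequence F0 9F 98 80. */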
  134. static unsigned char utf8_replacement_char[3] = {0xEF, 0xBF, 0xBD};
  135. struct json_tokener *json_tokener_new_ex(int depth)
  136. {
  137. struct json_tokener *tok;
  138. if (depth < 1)
  139. return NULL;
  140. tok = (struct json_tokener *)calloc(1, sizeof(struct json_tokener));
  141. if (!tok)
  142. return NULL;
  143. tok->stack = (struct json_tokener_srec *)calloc(depth, sizeof(struct json_tokener_srec));
  144. if (!tok->stack)
  145. {
  146. free(tok);
  147. return NULL;
  148. }
  149. tok->pb = printbuf_new();
  150. if (!tok->pb)
  151. {
  152. free(tok->stack);
  153. free(tok);
  154. return NULL;
  155. }
  156. tok->max_depth = depth;
  157. json_tokener_reset(tok);
  158. return tok;
  159. }
  160. struct json_tokener *json_tokener_new(void)
  161. {
  162. return json_tokener_new_ex(JSON_TOKENER_DEFAULT_DEPTH);
  163. }
  164. void json_tokener_free(struct json_tokener *tok)
  165. {
  166. if (!tok)
  167. return;
  168. json_tokener_reset(tok);
  169. if (tok->pb)
  170. printbuf_free(tok->pb);
  171. free(tok->stack);
  172. free(tok);
  173. }
  174. static void json_tokener_reset_level(struct json_tokener *tok, int depth)
  175. {
  176. tok->stack[depth].state = json_tokener_state_eatws;
  177. tok->stack[depth].saved_state = json_tokener_state_start;
  178. json_object_put(tok->stack[depth].current);
  179. tok->stack[depth].current = NULL;
  180. free(tok->stack[depth].obj_field_name);
  181. tok->stack[depth].obj_field_name = NULL;
  182. }
  183. void json_tokener_reset(struct json_tokener *tok)
  184. {
  185. int i;
  186. if (!tok)
  187. return;
  188. for (i = tok->depth; i >= 0; i--)
  189. json_tokener_reset_level(tok, i);
  190. tok->depth = 0;
  191. tok->err = json_tokener_success;
  192. }
  193. struct json_object *json_tokener_parse(const char *str)
  194. {
  195. enum json_tokener_error jerr_ignored;
  196. struct json_object *obj;
  197. obj = json_tokener_parse_verbose(str, &jerr_ignored);
  198. return obj;
  199. }
  200. struct json_object *json_tokener_parse_verbose(const char *str, enum json_tokener_error *error)
  201. {
  202. struct json_tokener *tok;
  203. struct json_object *obj;
  204. tok = json_tokener_new();
  205. if (!tok)
  206. {
  207. *error = json_tokener_error_memory;
  208. return NULL;
  209. }
  210. obj = json_tokener_parse_ex(tok, str, -1);
  211. *error = tok->err;
  212. if (tok->err != json_tokener_success
  213. #if 0
  214. /* This would be a more sensible default, and cause parsing
  215. * things like "null123" to fail when the caller can't know
  216. * where the parsing left off, but starting to fail would
  217. * be a notable behaviour change. Save for a 1.0 release.
  218. */
  219. || json_tokener_get_parse_end(tok) != strlen(str)
  220. #endif
  221. )
  222. {
  223. if (obj != NULL)
  224. json_object_put(obj);
  225. obj = NULL;
  226. }
  227. json_tokener_free(tok);
  228. return obj;
  229. }
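/* Caller-side sketch (handle_oom() is a hypothetical application function):
 * allocation failures surface as a NULL result with the error set to
 * json_tokener_error_memory, so callers of json_tokener_parse_verbose() can
 * tell them apart from malformed input:
 *
 *     enum json_tokener_error jerr;
 *     struct json_object *o = json_tokener_parse_verbose(buf, &jerr);
 *     if (o == NULL && jerr == json_tokener_error_memory)
 *         handle_oom();
 */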
  230. #define state tok->stack[tok->depth].state
  231. #define saved_state tok->stack[tok->depth].saved_state
  232. #define current tok->stack[tok->depth].current
  233. #define obj_field_name tok->stack[tok->depth].obj_field_name
  234. /* Optimization:
  235. * json_tokener_parse_ex() consumed a lot of CPU in its main loop,
  236. * iterating character by character. A large performance boost is
  237. * achieved by using tighter loops to locally handle units such as
  238. * comments and strings. Loops that handle an entire token within
  239. * their scope also gather entire strings and pass them to
  240. * printbuf_memappend() in a single call, rather than calling
  241. * printbuf_memappend() one char at a time.
  242. *
  243. * PEEK_CHAR() and ADVANCE_CHAR() macros are used for code that is
  244. * common to both the main loop and the tighter loops.
  245. */
  246. /* PEEK_CHAR(dest, tok) macro:
  247. * Peeks at the current char and stores it in dest.
  248. * Returns 1 on success, sets tok->err and returns 0 if no more chars.
  249. * Implicit inputs: str, len, nBytesp vars
  250. */
  251. #define PEEK_CHAR(dest, tok) \
  252. (((tok)->char_offset == len) \
  253. ? (((tok)->depth == 0 && state == json_tokener_state_eatws && \
  254. saved_state == json_tokener_state_finish) \
  255. ? (((tok)->err = json_tokener_success), 0) \
  256. : (((tok)->err = json_tokener_continue), 0)) \
  257. : (((tok->flags & JSON_TOKENER_VALIDATE_UTF8) && \
  258. (!json_tokener_validate_utf8(*str, nBytesp))) \
  259. ? ((tok->err = json_tokener_error_parse_utf8_string), 0) \
  260. : (((dest) = *str), 1)))
  261. /* ADVANCE_CHAR() macro:
  262. * Increments str & tok->char_offset.
  263. * For convenience of existing conditionals, returns the old value of c (0 on eof).
  264. * Implicit inputs: c var
  265. */
  266. #define ADVANCE_CHAR(str, tok) (++(str), ((tok)->char_offset)++, c)
  267. /* printbuf_memappend_checked(p, s, l) macro:
  268. * Add string s of length l to printbuffer p.
  269. * If the append fails, abort parsing with an out-of-memory error.
  270. */
  271. #define printbuf_memappend_checked(p, s, l) \
  272. do { \
  273. if (printbuf_memappend((p), (s), (l)) < 0) \
  274. { \
  275. tok->err = json_tokener_error_memory; \
  276. goto out; \
  277. } \
  278. } while (0)
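/* Sketch of the idiom the tighter loops below build from these macros
 * (stop_char stands in for whatever character ends the current token):
 *
 *     while (c != stop_char)
 *     {
 *         if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
 *             goto out;  // input exhausted; parsing resumes on the next call
 *     }
 *
 * ADVANCE_CHAR() evaluates to the previous value of c, so the first condition
 * only ends the loop when that previous character was already '\0'. */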
  279. /* End optimization macro defs */
  280. struct json_object *json_tokener_parse_ex(struct json_tokener *tok, const char *str, int len)
  281. {
  282. struct json_object *obj = NULL;
  283. char c = '\1';
  284. unsigned int nBytes = 0;
  285. unsigned int *nBytesp = &nBytes;
  286. #ifdef HAVE_USELOCALE
  287. locale_t oldlocale = uselocale(NULL);
  288. locale_t newloc;
  289. #elif defined(HAVE_SETLOCALE)
  290. char *oldlocale = NULL;
  291. #endif
  292. tok->char_offset = 0;
  293. tok->err = json_tokener_success;
  294. /* this interface is presently not 64-bit clean due to the int len argument
  295. * and the internal printbuf interface that takes 32-bit int len arguments
  296. * so the function limits the maximum string size to INT32_MAX (2GB).
  297. * If the function is called with len == -1 then strlen is called to check
  298. * the string length is less than INT32_MAX (2GB)
  299. */
  300. if ((len < -1) || (len == -1 && strlen(str) > INT32_MAX))
  301. {
  302. tok->err = json_tokener_error_size;
  303. return NULL;
  304. }
  305. #ifdef HAVE_USELOCALE
  306. {
  307. #ifdef HAVE_DUPLOCALE
  308. locale_t duploc = duplocale(oldlocale);
  309. if (duploc == NULL && errno == ENOMEM)
  310. {
  311. tok->err = json_tokener_error_memory;
  312. return NULL;
  313. }
  314. newloc = newlocale(LC_NUMERIC_MASK, "C", duploc);
  315. #else
  316. newloc = newlocale(LC_NUMERIC_MASK, "C", oldlocale);
  317. #endif
  318. if (newloc == NULL)
  319. {
  320. tok->err = json_tokener_error_memory;
  321. #ifdef HAVE_DUPLOCALE
  322. freelocale(duploc);
  323. #endif
  324. return NULL;
  325. }
  326. #ifdef NEWLOCALE_NEEDS_FREELOCALE
  327. #ifdef HAVE_DUPLOCALE
  328. // Older versions of FreeBSD (<12.4) don't free the locale
  329. // passed to newlocale(), so do it here
  330. freelocale(duploc);
  331. #endif
  332. #endif
  333. uselocale(newloc);
  334. }
  335. #elif defined(HAVE_SETLOCALE)
  336. {
  337. char *tmplocale;
  338. tmplocale = setlocale(LC_NUMERIC, NULL);
  339. if (tmplocale)
  340. {
  341. oldlocale = strdup(tmplocale);
  342. if (oldlocale == NULL)
  343. {
  344. tok->err = json_tokener_error_memory;
  345. return NULL;
  346. }
  347. }
  348. setlocale(LC_NUMERIC, "C");
  349. }
  350. #endif
  351. while (PEEK_CHAR(c, tok)) // Note: c might be '\0' !
  352. {
  353. redo_char:
  354. switch (state)
  355. {
  356. case json_tokener_state_eatws:
  357. /* Advance until we change state */
  358. while (is_ws_char(c))
  359. {
  360. if ((!ADVANCE_CHAR(str, tok)) || (!PEEK_CHAR(c, tok)))
  361. goto out;
  362. }
  363. if (c == '/' && !(tok->flags & JSON_TOKENER_STRICT))
  364. {
  365. printbuf_reset(tok->pb);
  366. printbuf_memappend_checked(tok->pb, &c, 1);
  367. state = json_tokener_state_comment_start;
  368. }
  369. else
  370. {
  371. state = saved_state;
  372. goto redo_char;
  373. }
  374. break;
  375. case json_tokener_state_start:
  376. switch (c)
  377. {
  378. case '{':
  379. state = json_tokener_state_eatws;
  380. saved_state = json_tokener_state_object_field_start;
  381. current = json_object_new_object();
  382. if (current == NULL)
  383. {
  384. tok->err = json_tokener_error_memory;
  385. goto out;
  386. }
  387. break;
  388. case '[':
  389. state = json_tokener_state_eatws;
  390. saved_state = json_tokener_state_array;
  391. current = json_object_new_array();
  392. if (current == NULL)
  393. {
  394. tok->err = json_tokener_error_memory;
  395. goto out;
  396. }
  397. break;
  398. case 'I':
  399. case 'i':
  400. state = json_tokener_state_inf;
  401. printbuf_reset(tok->pb);
  402. tok->st_pos = 0;
  403. goto redo_char;
  404. case 'N':
  405. case 'n':
  406. state = json_tokener_state_null; // or NaN
  407. printbuf_reset(tok->pb);
  408. tok->st_pos = 0;
  409. goto redo_char;
  410. case '\'':
  411. if (tok->flags & JSON_TOKENER_STRICT)
  412. {
  413. /* in STRICT mode only double quotes are allowed */
  414. tok->err = json_tokener_error_parse_unexpected;
  415. goto out;
  416. }
  417. /* FALLTHRU */
  418. case '"':
  419. state = json_tokener_state_string;
  420. printbuf_reset(tok->pb);
  421. tok->quote_char = c;
  422. break;
  423. case 'T':
  424. case 't':
  425. case 'F':
  426. case 'f':
  427. state = json_tokener_state_boolean;
  428. printbuf_reset(tok->pb);
  429. tok->st_pos = 0;
  430. goto redo_char;
  431. case '0':
  432. case '1':
  433. case '2':
  434. case '3':
  435. case '4':
  436. case '5':
  437. case '6':
  438. case '7':
  439. case '8':
  440. case '9':
  441. case '-':
  442. state = json_tokener_state_number;
  443. printbuf_reset(tok->pb);
  444. tok->is_double = 0;
  445. goto redo_char;
  446. default: tok->err = json_tokener_error_parse_unexpected; goto out;
  447. }
  448. break;
  449. case json_tokener_state_finish:
  450. if (tok->depth == 0)
  451. goto out;
  452. obj = json_object_get(current);
  453. json_tokener_reset_level(tok, tok->depth);
  454. tok->depth--;
  455. goto redo_char;
  456. case json_tokener_state_inf: /* aka starts with 'i' (or 'I', or "-i", or "-I") */
  457. {
  458. /* If we were guaranteed to have len set, then we could (usually) handle
  459. * the entire "Infinity" check in a single strncmp (strncasecmp), but
  460. * since len might be -1 (i.e. "read until \0"), we need to check it
  461. * a character at a time.
  462. * Trying to handle it both ways would make this code considerably more
  463. * complicated with likely little performance benefit.
  464. */
  465. int is_negative = 0;
  466. /* Note: tok->st_pos must be 0 when state is set to json_tokener_state_inf */
  467. while (tok->st_pos < (int)json_inf_str_len)
  468. {
  469. char inf_char = *str;
  470. if (inf_char != json_inf_str[tok->st_pos] &&
  471. ((tok->flags & JSON_TOKENER_STRICT) ||
  472. inf_char != json_inf_str_invert[tok->st_pos])
  473. )
  474. {
  475. tok->err = json_tokener_error_parse_unexpected;
  476. goto out;
  477. }
  478. tok->st_pos++;
  479. (void)ADVANCE_CHAR(str, tok);
  480. if (!PEEK_CHAR(c, tok))
  481. {
  482. /* out of input chars, for now at least */
  483. goto out;
  484. }
  485. }
  486. /* We checked the full length of "Infinity", so create the object.
  487. * When handling -Infinity, the number parsing code will have dropped
  488. * the "-" into tok->pb for us, so check it now.
  489. */
  490. if (printbuf_length(tok->pb) > 0 && *(tok->pb->buf) == '-')
  491. {
  492. is_negative = 1;
  493. }
  494. current = json_object_new_double(is_negative ? -INFINITY : INFINITY);
  495. if (current == NULL)
  496. {
  497. tok->err = json_tokener_error_memory;
  498. goto out;
  499. }
  500. saved_state = json_tokener_state_finish;
  501. state = json_tokener_state_eatws;
  502. goto redo_char;
  503. }
  504. break;
  505. case json_tokener_state_null: /* aka starts with 'n' */
  506. {
  507. int size;
  508. int size_nan;
  509. printbuf_memappend_checked(tok->pb, &c, 1);
  510. size = json_min(tok->st_pos + 1, json_null_str_len);
  511. size_nan = json_min(tok->st_pos + 1, json_nan_str_len);
  512. if ((!(tok->flags & JSON_TOKENER_STRICT) &&
  513. strncasecmp(json_null_str, tok->pb->buf, size) == 0) ||
  514. (strncmp(json_null_str, tok->pb->buf, size) == 0))
  515. {
  516. if (tok->st_pos == json_null_str_len)
  517. {
  518. current = NULL;
  519. saved_state = json_tokener_state_finish;
  520. state = json_tokener_state_eatws;
  521. goto redo_char;
  522. }
  523. }
  524. else if ((!(tok->flags & JSON_TOKENER_STRICT) &&
  525. strncasecmp(json_nan_str, tok->pb->buf, size_nan) == 0) ||
  526. (strncmp(json_nan_str, tok->pb->buf, size_nan) == 0))
  527. {
  528. if (tok->st_pos == json_nan_str_len)
  529. {
  530. current = json_object_new_double(NAN);
  531. if (current == NULL)
  532. {
  533. tok->err = json_tokener_error_memory;
  534. goto out;
  535. }
  536. saved_state = json_tokener_state_finish;
  537. state = json_tokener_state_eatws;
  538. goto redo_char;
  539. }
  540. }
  541. else
  542. {
  543. tok->err = json_tokener_error_parse_null;
  544. goto out;
  545. }
  546. tok->st_pos++;
  547. }
  548. break;
  549. case json_tokener_state_comment_start:
  550. if (c == '*')
  551. {
  552. state = json_tokener_state_comment;
  553. }
  554. else if (c == '/')
  555. {
  556. state = json_tokener_state_comment_eol;
  557. }
  558. else
  559. {
  560. tok->err = json_tokener_error_parse_comment;
  561. goto out;
  562. }
  563. printbuf_memappend_checked(tok->pb, &c, 1);
  564. break;
  565. case json_tokener_state_comment:
  566. {
  567. /* Advance until we change state */
  568. const char *case_start = str;
  569. while (c != '*')
  570. {
  571. if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
  572. {
  573. printbuf_memappend_checked(tok->pb, case_start,
  574. str - case_start);
  575. goto out;
  576. }
  577. }
  578. printbuf_memappend_checked(tok->pb, case_start, 1 + str - case_start);
  579. state = json_tokener_state_comment_end;
  580. }
  581. break;
  582. case json_tokener_state_comment_eol:
  583. {
  584. /* Advance until we change state */
  585. const char *case_start = str;
  586. while (c != '\n')
  587. {
  588. if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
  589. {
  590. printbuf_memappend_checked(tok->pb, case_start,
  591. str - case_start);
  592. goto out;
  593. }
  594. }
  595. printbuf_memappend_checked(tok->pb, case_start, str - case_start);
  596. MC_DEBUG("json_tokener_comment: %s\n", tok->pb->buf);
  597. state = json_tokener_state_eatws;
  598. }
  599. break;
  600. case json_tokener_state_comment_end:
  601. printbuf_memappend_checked(tok->pb, &c, 1);
  602. if (c == '/')
  603. {
  604. MC_DEBUG("json_tokener_comment: %s\n", tok->pb->buf);
  605. state = json_tokener_state_eatws;
  606. }
  607. else
  608. {
  609. state = json_tokener_state_comment;
  610. }
  611. break;
  612. case json_tokener_state_string:
  613. {
  614. /* Advance until we change state */
  615. const char *case_start = str;
  616. while (1)
  617. {
  618. if (c == tok->quote_char)
  619. {
  620. printbuf_memappend_checked(tok->pb, case_start,
  621. str - case_start);
  622. current =
  623. json_object_new_string_len(tok->pb->buf, tok->pb->bpos);
  624. if (current == NULL)
  625. {
  626. tok->err = json_tokener_error_memory;
  627. goto out;
  628. }
  629. saved_state = json_tokener_state_finish;
  630. state = json_tokener_state_eatws;
  631. break;
  632. }
  633. else if (c == '\\')
  634. {
  635. printbuf_memappend_checked(tok->pb, case_start,
  636. str - case_start);
  637. saved_state = json_tokener_state_string;
  638. state = json_tokener_state_string_escape;
  639. break;
  640. }
  641. else if ((tok->flags & JSON_TOKENER_STRICT) && (unsigned char)c <= 0x1f)
  642. {
  643. // Disallow control characters in strict mode
  644. tok->err = json_tokener_error_parse_string;
  645. goto out;
  646. }
  647. if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
  648. {
  649. printbuf_memappend_checked(tok->pb, case_start,
  650. str - case_start);
  651. goto out;
  652. }
  653. }
  654. }
  655. break;
  656. case json_tokener_state_string_escape:
  657. switch (c)
  658. {
  659. case '"':
  660. case '\\':
  661. case '/':
  662. printbuf_memappend_checked(tok->pb, &c, 1);
  663. state = saved_state;
  664. break;
  665. case 'b':
  666. case 'n':
  667. case 'r':
  668. case 't':
  669. case 'f':
  670. if (c == 'b')
  671. printbuf_memappend_checked(tok->pb, "\b", 1);
  672. else if (c == 'n')
  673. printbuf_memappend_checked(tok->pb, "\n", 1);
  674. else if (c == 'r')
  675. printbuf_memappend_checked(tok->pb, "\r", 1);
  676. else if (c == 't')
  677. printbuf_memappend_checked(tok->pb, "\t", 1);
  678. else if (c == 'f')
  679. printbuf_memappend_checked(tok->pb, "\f", 1);
  680. state = saved_state;
  681. break;
  682. case 'u':
  683. tok->ucs_char = 0;
  684. tok->st_pos = 0;
  685. state = json_tokener_state_escape_unicode;
  686. break;
  687. default: tok->err = json_tokener_error_parse_string; goto out;
  688. }
  689. break;
  690. // ===================================================
  691. case json_tokener_state_escape_unicode:
  692. {
  693. /* Handle a 4-byte \uNNNN sequence, or two sequences if a surrogate pair */
  694. while (1)
  695. {
  696. if (!c || !is_hex_char(c))
  697. {
  698. tok->err = json_tokener_error_parse_string;
  699. goto out;
  700. }
  701. tok->ucs_char |=
  702. ((unsigned int)jt_hexdigit(c) << ((3 - tok->st_pos) * 4));
  703. tok->st_pos++;
  704. if (tok->st_pos >= 4)
  705. break;
  706. (void)ADVANCE_CHAR(str, tok);
  707. if (!PEEK_CHAR(c, tok))
  708. {
  709. /*
  710. * We're out of characters in the current call to
  711. * json_tokener_parse(), but a subsequent call might
  712. * provide us with more, so leave our current state
  713. * as-is (including tok->high_surrogate) and return.
  714. */
  715. goto out;
  716. }
  717. }
  718. tok->st_pos = 0;
  719. /* Now, we have a full \uNNNN sequence in tok->ucs_char */
  720. /* If the *previous* sequence was a high surrogate ... */
  721. if (tok->high_surrogate)
  722. {
  723. if (IS_LOW_SURROGATE(tok->ucs_char))
  724. {
  725. /* Recalculate the ucs_char, then fall thru to process normally */
  726. tok->ucs_char = DECODE_SURROGATE_PAIR(tok->high_surrogate,
  727. tok->ucs_char);
  728. }
  729. else
  730. {
  731. /* High surrogate was not followed by a low surrogate
  732. * Replace the high and process the rest normally
  733. */
  734. printbuf_memappend_checked(tok->pb,
  735. (char *)utf8_replacement_char, 3);
  736. }
  737. tok->high_surrogate = 0;
  738. }
  739. if (tok->ucs_char < 0x80)
  740. {
  741. unsigned char unescaped_utf[1];
  742. unescaped_utf[0] = tok->ucs_char;
  743. printbuf_memappend_checked(tok->pb, (char *)unescaped_utf, 1);
  744. }
  745. else if (tok->ucs_char < 0x800)
  746. {
  747. unsigned char unescaped_utf[2];
  748. unescaped_utf[0] = 0xc0 | (tok->ucs_char >> 6);
  749. unescaped_utf[1] = 0x80 | (tok->ucs_char & 0x3f);
  750. printbuf_memappend_checked(tok->pb, (char *)unescaped_utf, 2);
  751. }
  752. else if (IS_HIGH_SURROGATE(tok->ucs_char))
  753. {
  754. /*
  755. * The next two characters should be \u, HOWEVER,
  756. * we can't simply peek ahead here, because the
  757. * characters we need might not be passed to us
  758. * until a subsequent call to json_tokener_parse.
  759. * Instead, transition through a couple of states.
  760. * (now):
  761. * _escape_unicode => _unicode_need_escape
  762. * (see a '\\' char):
  763. * _unicode_need_escape => _unicode_need_u
  764. * (see a 'u' char):
  765. * _unicode_need_u => _escape_unicode
  766. * ...and we'll end up back around here.
  767. */
  768. tok->high_surrogate = tok->ucs_char;
  769. tok->ucs_char = 0;
  770. state = json_tokener_state_escape_unicode_need_escape;
  771. break;
  772. }
  773. else if (IS_LOW_SURROGATE(tok->ucs_char))
  774. {
  775. /* Got a low surrogate not preceded by a high */
  776. printbuf_memappend_checked(tok->pb, (char *)utf8_replacement_char, 3);
  777. }
  778. else if (tok->ucs_char < 0x10000)
  779. {
  780. unsigned char unescaped_utf[3];
  781. unescaped_utf[0] = 0xe0 | (tok->ucs_char >> 12);
  782. unescaped_utf[1] = 0x80 | ((tok->ucs_char >> 6) & 0x3f);
  783. unescaped_utf[2] = 0x80 | (tok->ucs_char & 0x3f);
  784. printbuf_memappend_checked(tok->pb, (char *)unescaped_utf, 3);
  785. }
  786. else if (tok->ucs_char < 0x110000)
  787. {
  788. unsigned char unescaped_utf[4];
  789. unescaped_utf[0] = 0xf0 | ((tok->ucs_char >> 18) & 0x07);
  790. unescaped_utf[1] = 0x80 | ((tok->ucs_char >> 12) & 0x3f);
  791. unescaped_utf[2] = 0x80 | ((tok->ucs_char >> 6) & 0x3f);
  792. unescaped_utf[3] = 0x80 | (tok->ucs_char & 0x3f);
  793. printbuf_memappend_checked(tok->pb, (char *)unescaped_utf, 4);
  794. }
  795. else
  796. {
  797. /* Don't know what we got--insert the replacement char */
  798. printbuf_memappend_checked(tok->pb, (char *)utf8_replacement_char, 3);
  799. }
  800. state = saved_state; // i.e. _state_string or _state_object_field
  801. }
  802. break;
  803. case json_tokener_state_escape_unicode_need_escape:
  804. // We get here after processing a high_surrogate
  805. // require a '\\' char
  806. if (!c || c != '\\')
  807. {
  808. /* Got a high surrogate without another sequence following
  809. * it. Put a replacement char in for the high surrogate
  810. * and pop back up to _state_string or _state_object_field.
  811. */
  812. printbuf_memappend_checked(tok->pb, (char *)utf8_replacement_char, 3);
  813. tok->high_surrogate = 0;
  814. tok->ucs_char = 0;
  815. tok->st_pos = 0;
  816. state = saved_state;
  817. goto redo_char;
  818. }
  819. state = json_tokener_state_escape_unicode_need_u;
  820. break;
  821. case json_tokener_state_escape_unicode_need_u:
  822. /* We already had a \ char, check that it's \u */
  823. if (!c || c != 'u')
  824. {
  825. /* Got a high surrogate with some non-unicode escape
  826. * sequence following it.
  827. * Put a replacement char in for the high surrogate
  828. * and handle the escape sequence normally.
  829. */
  830. printbuf_memappend_checked(tok->pb, (char *)utf8_replacement_char, 3);
  831. tok->high_surrogate = 0;
  832. tok->ucs_char = 0;
  833. tok->st_pos = 0;
  834. state = json_tokener_state_string_escape;
  835. goto redo_char;
  836. }
  837. state = json_tokener_state_escape_unicode;
  838. break;
  839. // ===================================================
  840. case json_tokener_state_boolean:
  841. {
  842. int size1, size2;
  843. printbuf_memappend_checked(tok->pb, &c, 1);
  844. size1 = json_min(tok->st_pos + 1, json_true_str_len);
  845. size2 = json_min(tok->st_pos + 1, json_false_str_len);
  846. if ((!(tok->flags & JSON_TOKENER_STRICT) &&
  847. strncasecmp(json_true_str, tok->pb->buf, size1) == 0) ||
  848. (strncmp(json_true_str, tok->pb->buf, size1) == 0))
  849. {
  850. if (tok->st_pos == json_true_str_len)
  851. {
  852. current = json_object_new_boolean(1);
  853. if (current == NULL)
  854. {
  855. tok->err = json_tokener_error_memory;
  856. goto out;
  857. }
  858. saved_state = json_tokener_state_finish;
  859. state = json_tokener_state_eatws;
  860. goto redo_char;
  861. }
  862. }
  863. else if ((!(tok->flags & JSON_TOKENER_STRICT) &&
  864. strncasecmp(json_false_str, tok->pb->buf, size2) == 0) ||
  865. (strncmp(json_false_str, tok->pb->buf, size2) == 0))
  866. {
  867. if (tok->st_pos == json_false_str_len)
  868. {
  869. current = json_object_new_boolean(0);
  870. if (current == NULL)
  871. {
  872. tok->err = json_tokener_error_memory;
  873. goto out;
  874. }
  875. saved_state = json_tokener_state_finish;
  876. state = json_tokener_state_eatws;
  877. goto redo_char;
  878. }
  879. }
  880. else
  881. {
  882. tok->err = json_tokener_error_parse_boolean;
  883. goto out;
  884. }
  885. tok->st_pos++;
  886. }
  887. break;
  888. case json_tokener_state_number:
  889. {
  890. /* Advance until we change state */
  891. const char *case_start = str;
  892. int case_len = 0;
  893. int is_exponent = 0;
  894. int neg_sign_ok = 1;
  895. int pos_sign_ok = 0;
  896. if (printbuf_length(tok->pb) > 0)
  897. {
  898. /* We don't save all state from the previous incremental parse
  899. so we need to re-generate it based on the saved string so far.
  900. */
  901. char *e_loc = strchr(tok->pb->buf, 'e');
  902. if (!e_loc)
  903. e_loc = strchr(tok->pb->buf, 'E');
  904. if (e_loc)
  905. {
  906. char *last_saved_char =
  907. &tok->pb->buf[printbuf_length(tok->pb) - 1];
  908. is_exponent = 1;
  909. pos_sign_ok = neg_sign_ok = 1;
  910. /* If the "e" isn't at the end, we can't start with a '-' */
  911. if (e_loc != last_saved_char)
  912. {
  913. neg_sign_ok = 0;
  914. pos_sign_ok = 0;
  915. }
  916. // else leave it set to 1, i.e. start of the new input
  917. }
  918. }
  919. while (c && ((c >= '0' && c <= '9') ||
  920. (!is_exponent && (c == 'e' || c == 'E')) ||
  921. (neg_sign_ok && c == '-') || (pos_sign_ok && c == '+') ||
  922. (!tok->is_double && c == '.')))
  923. {
  924. pos_sign_ok = neg_sign_ok = 0;
  925. ++case_len;
  926. /* non-digit characters checks */
  927. /* note: since the main loop condition to get here was
  928. * an input starting with 0-9 or '-', we are
  929. * protected from input starting with '.' or
  930. * e/E.
  931. */
  932. switch (c)
  933. {
  934. case '.':
  935. tok->is_double = 1;
  936. pos_sign_ok = 1;
  937. neg_sign_ok = 1;
  938. break;
  939. case 'e': /* FALLTHRU */
  940. case 'E':
  941. is_exponent = 1;
  942. tok->is_double = 1;
  943. /* the exponent part can begin with a negative sign */
  944. pos_sign_ok = neg_sign_ok = 1;
  945. break;
  946. default: break;
  947. }
  948. if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
  949. {
  950. printbuf_memappend_checked(tok->pb, case_start, case_len);
  951. goto out;
  952. }
  953. }
  954. /*
  955. Now we know c isn't a valid number char, but check whether
  956. it might have been intended to be, and return a potentially
  957. more understandable error right away.
  958. However, if we're at the top-level, use the number as-is
  959. because c can be part of a new object to parse on the
  960. next call to json_tokener_parse().
  961. */
  962. if (tok->depth > 0 && c != ',' && c != ']' && c != '}' && c != '/' &&
  963. c != 'I' && c != 'i' && !is_ws_char(c))
  964. {
  965. tok->err = json_tokener_error_parse_number;
  966. goto out;
  967. }
  968. if (case_len > 0)
  969. printbuf_memappend_checked(tok->pb, case_start, case_len);
  970. // Check for -Infinity
  971. if (tok->pb->buf[0] == '-' && case_len <= 1 && (c == 'i' || c == 'I'))
  972. {
  973. state = json_tokener_state_inf;
  974. tok->st_pos = 0;
  975. goto redo_char;
  976. }
  977. if (tok->is_double && !(tok->flags & JSON_TOKENER_STRICT))
  978. {
  979. /* Trim some chars off the end, to allow things
  980. like "123e+" to parse ok. */
  981. while (printbuf_length(tok->pb) > 1)
  982. {
  983. char last_char = tok->pb->buf[printbuf_length(tok->pb) - 1];
  984. if (last_char != 'e' && last_char != 'E' &&
  985. last_char != '-' && last_char != '+')
  986. {
  987. break;
  988. }
  989. tok->pb->buf[printbuf_length(tok->pb) - 1] = '\0';
  990. printbuf_length(tok->pb)--;
  991. }
  992. }
  993. }
  994. {
  995. int64_t num64;
  996. uint64_t numuint64;
  997. double numd;
  998. if (!tok->is_double && tok->pb->buf[0] == '-' &&
  999. json_parse_int64(tok->pb->buf, &num64) == 0)
  1000. {
  1001. if (errno == ERANGE && (tok->flags & JSON_TOKENER_STRICT))
  1002. {
  1003. tok->err = json_tokener_error_parse_number;
  1004. goto out;
  1005. }
  1006. current = json_object_new_int64(num64);
  1007. if (current == NULL)
  1008. {
  1009. tok->err = json_tokener_error_memory;
  1010. goto out;
  1011. }
  1012. }
  1013. else if (!tok->is_double && tok->pb->buf[0] != '-' &&
  1014. json_parse_uint64(tok->pb->buf, &numuint64) == 0)
  1015. {
  1016. if (errno == ERANGE && (tok->flags & JSON_TOKENER_STRICT))
  1017. {
  1018. tok->err = json_tokener_error_parse_number;
  1019. goto out;
  1020. }
  1021. if (numuint64 && tok->pb->buf[0] == '0' &&
  1022. (tok->flags & JSON_TOKENER_STRICT))
  1023. {
  1024. tok->err = json_tokener_error_parse_number;
  1025. goto out;
  1026. }
  1027. if (numuint64 <= INT64_MAX)
  1028. {
  1029. num64 = (uint64_t)numuint64;
  1030. current = json_object_new_int64(num64);
  1031. if (current == NULL)
  1032. {
  1033. tok->err = json_tokener_error_memory;
  1034. goto out;
  1035. }
  1036. }
  1037. else
  1038. {
  1039. current = json_object_new_uint64(numuint64);
  1040. if (current == NULL)
  1041. {
  1042. tok->err = json_tokener_error_memory;
  1043. goto out;
  1044. }
  1045. }
  1046. }
  1047. else if (tok->is_double &&
  1048. json_tokener_parse_double(
  1049. tok->pb->buf, printbuf_length(tok->pb), &numd) == 0)
  1050. {
  1051. current = json_object_new_double_s(numd, tok->pb->buf);
  1052. if (current == NULL)
  1053. {
  1054. tok->err = json_tokener_error_memory;
  1055. goto out;
  1056. }
  1057. }
  1058. else
  1059. {
  1060. tok->err = json_tokener_error_parse_number;
  1061. goto out;
  1062. }
  1063. saved_state = json_tokener_state_finish;
  1064. state = json_tokener_state_eatws;
  1065. goto redo_char;
  1066. }
  1067. break;
		case json_tokener_state_array_after_sep:
		case json_tokener_state_array:
			if (c == ']')
			{
				// Minimize memory usage; assume parsed objs are unlikely to be changed
				json_object_array_shrink(current, 0);

				if (state == json_tokener_state_array_after_sep &&
				    (tok->flags & JSON_TOKENER_STRICT))
				{
					tok->err = json_tokener_error_parse_unexpected;
					goto out;
				}
				saved_state = json_tokener_state_finish;
				state = json_tokener_state_eatws;
			}
			else
			{
				if (tok->depth >= tok->max_depth - 1)
				{
					tok->err = json_tokener_error_depth;
					goto out;
				}
				state = json_tokener_state_array_add;
				tok->depth++;
				json_tokener_reset_level(tok, tok->depth);
				goto redo_char;
			}
			break;

		case json_tokener_state_array_add:
			if (json_object_array_add(current, obj) != 0)
			{
				tok->err = json_tokener_error_memory;
				goto out;
			}
			saved_state = json_tokener_state_array_sep;
			state = json_tokener_state_eatws;
			goto redo_char;

		case json_tokener_state_array_sep:
			if (c == ']')
			{
				// Minimize memory usage; assume parsed objs are unlikely to be changed
				json_object_array_shrink(current, 0);

				saved_state = json_tokener_state_finish;
				state = json_tokener_state_eatws;
			}
			else if (c == ',')
			{
				saved_state = json_tokener_state_array_after_sep;
				state = json_tokener_state_eatws;
			}
			else
			{
				tok->err = json_tokener_error_parse_array;
				goto out;
			}
			break;
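The json_object_array_shrink(current, 0) calls above trim the spare capacity that json_object_array_add() keeps for growth once an array has been closed. A short sketch of the same public call used outside the parser (make_compact_array is a hypothetical helper, shown only to illustrate the idea):

```c
#include "json.h"

/* Illustrative: json_object_array_shrink() drops spare capacity reserved by
 * json_object_array_add(), leaving room for exactly `empty_slots` more
 * elements (0 here, as in the parser above). */
static struct json_object *make_compact_array(int n)
{
	struct json_object *arr = json_object_new_array();
	int i;

	if (arr == NULL)
		return NULL;
	for (i = 0; i < n; i++)
	{
		struct json_object *elem = json_object_new_int(i);
		if (elem == NULL || json_object_array_add(arr, elem) != 0)
		{
			/* On failure json_object_array_add() does not take ownership. */
			if (elem != NULL)
				json_object_put(elem);
			json_object_put(arr);
			return NULL; /* abort on OOM instead of returning a short array */
		}
	}
	json_object_array_shrink(arr, 0);
	return arr;
}
```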
		case json_tokener_state_object_field_start:
		case json_tokener_state_object_field_start_after_sep:
			if (c == '}')
			{
				if (state == json_tokener_state_object_field_start_after_sep &&
				    (tok->flags & JSON_TOKENER_STRICT))
				{
					tok->err = json_tokener_error_parse_unexpected;
					goto out;
				}
				saved_state = json_tokener_state_finish;
				state = json_tokener_state_eatws;
			}
			else if (c == '"' || c == '\'')
			{
				tok->quote_char = c;
				printbuf_reset(tok->pb);
				state = json_tokener_state_object_field;
			}
			else
			{
				tok->err = json_tokener_error_parse_object_key_name;
				goto out;
			}
			break;

		case json_tokener_state_object_field:
		{
			/* Advance until we change state */
			const char *case_start = str;
			while (1)
			{
				if (c == tok->quote_char)
				{
					printbuf_memappend_checked(tok->pb, case_start,
					                           str - case_start);
					obj_field_name = strdup(tok->pb->buf);
					if (obj_field_name == NULL)
					{
						tok->err = json_tokener_error_memory;
						goto out;
					}
					saved_state = json_tokener_state_object_field_end;
					state = json_tokener_state_eatws;
					break;
				}
				else if (c == '\\')
				{
					printbuf_memappend_checked(tok->pb, case_start,
					                           str - case_start);
					saved_state = json_tokener_state_object_field;
					state = json_tokener_state_string_escape;
					break;
				}
				else if ((tok->flags & JSON_TOKENER_STRICT) && (unsigned char)c <= 0x1f)
				{
					// Disallow control characters in strict mode
					tok->err = json_tokener_error_parse_string;
					goto out;
				}
				if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
				{
					printbuf_memappend_checked(tok->pb, case_start,
					                           str - case_start);
					goto out;
				}
			}
		}
		break;

		case json_tokener_state_object_field_end:
			if (c == ':')
			{
				saved_state = json_tokener_state_object_value;
				state = json_tokener_state_eatws;
			}
			else
			{
				tok->err = json_tokener_error_parse_object_key_sep;
				goto out;
			}
			break;
		case json_tokener_state_object_value:
			if (tok->depth >= tok->max_depth - 1)
			{
				tok->err = json_tokener_error_depth;
				goto out;
			}
			state = json_tokener_state_object_value_add;
			tok->depth++;
			json_tokener_reset_level(tok, tok->depth);
			goto redo_char;

		case json_tokener_state_object_value_add:
			if (json_object_object_add(current, obj_field_name, obj) != 0)
			{
				tok->err = json_tokener_error_memory;
				goto out;
			}
			free(obj_field_name);
			obj_field_name = NULL;
			saved_state = json_tokener_state_object_sep;
			state = json_tokener_state_eatws;
			goto redo_char;

		case json_tokener_state_object_sep:
			/* { */
			if (c == '}')
			{
				saved_state = json_tokener_state_finish;
				state = json_tokener_state_eatws;
			}
			else if (c == ',')
			{
				saved_state = json_tokener_state_object_field_start_after_sep;
				state = json_tokener_state_eatws;
			}
			else
			{
				tok->err = json_tokener_error_parse_object_value_sep;
				goto out;
			}
			break;
		}

		(void)ADVANCE_CHAR(str, tok);
		if (!c) // This is the char *before* advancing
			break;
	} /* while(PEEK_CHAR) */
out:
	if ((tok->flags & JSON_TOKENER_VALIDATE_UTF8) && (nBytes != 0))
	{
		tok->err = json_tokener_error_parse_utf8_string;
	}
	if (c && (state == json_tokener_state_finish) && (tok->depth == 0) &&
	    (tok->flags & (JSON_TOKENER_STRICT | JSON_TOKENER_ALLOW_TRAILING_CHARS)) ==
	        JSON_TOKENER_STRICT)
	{
		/* unexpected char after JSON data */
		tok->err = json_tokener_error_parse_unexpected;
	}
	if (!c)
	{
		/* We hit an eof char (0) */
		if (state != json_tokener_state_finish && saved_state != json_tokener_state_finish)
			tok->err = json_tokener_error_parse_eof;
	}

#ifdef HAVE_USELOCALE
	uselocale(oldlocale);
	freelocale(newloc);
#elif defined(HAVE_SETLOCALE)
	setlocale(LC_NUMERIC, oldlocale);
	free(oldlocale);
#endif

	if (tok->err == json_tokener_success)
	{
		json_object *ret = json_object_get(current);
		int ii;

		/* Partially reset, so we parse additional objects on subsequent calls. */
		for (ii = tok->depth; ii >= 0; ii--)
			json_tokener_reset_level(tok, ii);
		return ret;
	}

	MC_DEBUG("json_tokener_parse_ex: error %s at offset %d\n", json_tokener_errors[tok->err],
	         tok->char_offset);
	return NULL;
}
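Note that every allocation failure in the state machine now sets tok->err to json_tokener_error_memory and jumps to out, so callers see NULL plus a hard error instead of a silently truncated object. A minimal sketch of a caller that drives json_tokener_parse_ex() incrementally and distinguishes "need more input" from such hard errors (parse_stream and the 4 KiB chunk size are arbitrary choices for illustration):

```c
#include <stdio.h>
#include "json.h"

/* Illustrative caller: feed json_tokener_parse_ex() in chunks.
 * json_tokener_continue means "incomplete, feed more bytes"; any other
 * error (including json_tokener_error_memory) is treated as fatal. */
static struct json_object *parse_stream(FILE *fp)
{
	struct json_tokener *tok = json_tokener_new();
	struct json_object *obj = NULL;
	enum json_tokener_error jerr;
	char buf[4096];
	size_t n;

	if (tok == NULL)
		return NULL;
	while (obj == NULL && (n = fread(buf, 1, sizeof(buf), fp)) > 0)
	{
		obj = json_tokener_parse_ex(tok, buf, (int)n);
		jerr = json_tokener_get_error(tok);
		if (obj == NULL && jerr != json_tokener_continue)
		{
			fprintf(stderr, "parse error: %s\n", json_tokener_error_desc(jerr));
			break;
		}
	}
	json_tokener_free(tok);
	return obj; /* NULL on error or on truncated input */
}
```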
static json_bool json_tokener_validate_utf8(const char c, unsigned int *nBytes)
{
	unsigned char chr = c;
	if (*nBytes == 0)
	{
		if (chr >= 0x80)
		{
			if ((chr & 0xe0) == 0xc0)
				*nBytes = 1;
			else if ((chr & 0xf0) == 0xe0)
				*nBytes = 2;
			else if ((chr & 0xf8) == 0xf0)
				*nBytes = 3;
			else
				return 0;
		}
	}
	else
	{
		if ((chr & 0xC0) != 0x80)
			return 0;
		(*nBytes)--;
	}
	return 1;
}
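The masks above implement the standard UTF-8 framing rule: a lead byte announces how many 10xxxxxx continuation bytes must follow, and every continuation byte must match that pattern. The same classification as a standalone sketch (utf8_continuation_bytes is a hypothetical helper, not library API):

```c
/* Hypothetical helper mirroring the checks above: number of continuation
 * bytes a UTF-8 lead byte demands, or -1 if the byte cannot start a
 * sequence (it is itself a continuation byte or an invalid lead). */
static int utf8_continuation_bytes(unsigned char lead)
{
	if (lead < 0x80)
		return 0; /* plain ASCII */
	if ((lead & 0xe0) == 0xc0)
		return 1; /* 110xxxxx */
	if ((lead & 0xf0) == 0xe0)
		return 2; /* 1110xxxx */
	if ((lead & 0xf8) == 0xf0)
		return 3; /* 11110xxx */
	return -1;
}
```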
void json_tokener_set_flags(struct json_tokener *tok, int flags)
{
	tok->flags = flags;
}

size_t json_tokener_get_parse_end(struct json_tokener *tok)
{
	assert(tok->char_offset >= 0); /* Drop this line when char_offset becomes a size_t */
	return (size_t)tok->char_offset;
}
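Because a successful json_tokener_parse_ex() call partially resets the tokener (see the loop over json_tokener_reset_level above), json_tokener_get_parse_end() lets a caller walk through several JSON values stored back to back in one buffer. A minimal sketch (the input string is arbitrary):

```c
#include <stdio.h>
#include <string.h>
#include "json.h"

/* Illustrative: parse several JSON values stored back to back in one
 * buffer, advancing by json_tokener_get_parse_end() after each value. */
int main(void)
{
	const char *buf = "{\"a\": 1} [2, 3] \"done\"";
	size_t off = 0, len = strlen(buf);
	struct json_tokener *tok = json_tokener_new();
	struct json_object *obj;

	if (tok == NULL)
		return 1;
	while (off < len &&
	       (obj = json_tokener_parse_ex(tok, buf + off, (int)(len - off))) != NULL)
	{
		off += json_tokener_get_parse_end(tok);
		printf("%s\n", json_object_to_json_string(obj));
		json_object_put(obj);
	}
	json_tokener_free(tok);
	return 0;
}
```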
static int json_tokener_parse_double(const char *buf, int len, double *retval)
{
	char *end;
	*retval = strtod(buf, &end);
	if (buf + len == end)
		return 0; // It worked
	return 1;
}
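The end-pointer comparison is what makes this reject buffers that are only partially numeric: strtod() must have consumed exactly len bytes. The same technique as a standalone sketch (parse_full_double is a hypothetical helper, not library API):

```c
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical helper using the same end-pointer check as above: strtod()
 * must consume every one of the len bytes, otherwise the input is rejected. */
static int parse_full_double(const char *buf, size_t len, double *out)
{
	char *end;
	*out = strtod(buf, &end);
	return (buf + len == end) ? 0 : 1;
}

int main(void)
{
	double d;
	printf("%d\n", parse_full_double("3.25", 4, &d));  /* 0: fully consumed */
	printf("%d\n", parse_full_double("3.25x", 5, &d)); /* 1: trailing 'x' left over */
	return 0;
}
```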