You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

json_tokener.c 37 kB

json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
5 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
json_tokener_parse_ex: handle out of memory errors Do not silently truncate values or skip entries if out of memory errors occur. Proof of Concept: - Create poc.c, a program which creates an eight megabyte large json object with key "A" and a lot of "B"s as value, one of them is UTF-formatted: ```c #include <err.h> #include <stdio.h> #include <string.h> #include "json.h" #define STR_LEN (8 * 1024 * 1024) #define STR_PREFIX "{ \"A\": \"" #define STR_SUFFIX "\\u0042\" }" int main(void) { char *str; struct json_tokener *tok; struct json_object *obj; if ((tok = json_tokener_new()) == NULL) errx(1, "json_tokener_new"); if ((str = malloc(STR_LEN)) == NULL) err(1, "malloc"); memset(str, 'B', STR_LEN); memcpy(str, STR_PREFIX, sizeof(STR_PREFIX) - 1); memcpy(str + STR_LEN - sizeof(STR_SUFFIX), STR_SUFFIX, sizeof(STR_SUFFIX)); obj = json_tokener_parse(str); free(str); printf("%p\n", obj); if (obj != NULL) { printf("%.*s\n", 50, json_object_to_json_string(obj)); json_object_put(obj); } json_tokener_free(tok); return 0; } ``` - Compile and run poc, assuming you have enough free heap space: ``` gcc $(pkg-config --cflags --libs) -o poc poc.c ./poc 0x559421e15de0 { "A": "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB ``` - Reduce available heap and run again, which leads to truncation: ``` ulimit -d 10000 ./poc 0x555a5b453de0 { "A": "B" } ``` - Compile json-c with this change and run with reduced heap again: ``` ulimit -d 10000 ./poc (nil) ``` The output is limited to 70 characters, i.e. json-c parses the 8 MB string correctly but the poc does not print all of them to the screen. The truncation occurs because the parser tries to add all chars up to the UTF-8 formatted 'B' at once. Since memory is limited to 10 MB there is not enough for this operation. The parser does not fail but continues normally. Another possibility is to create a json file close to 2 GB and run a program on a system with limited amount of RAM, i.e. around 3 GB. 
But ulimit restrictions are much easier for proof of concepts. Treat memory errors correctly and abort operations.
3 years ago
5 years ago
5 years ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375
  1. /*
  2. * $Id: json_tokener.c,v 1.20 2006/07/25 03:24:50 mclark Exp $
  3. *
  4. * Copyright (c) 2004, 2005 Metaparadigm Pte. Ltd.
  5. * Michael Clark <michael@metaparadigm.com>
  6. *
  7. * This library is free software; you can redistribute it and/or modify
  8. * it under the terms of the MIT license. See COPYING for details.
  9. *
  10. *
  11. * Copyright (c) 2008-2009 Yahoo! Inc. All rights reserved.
  12. * The copyrights to the contents of this file are licensed under the MIT License
  13. * (https://www.opensource.org/licenses/mit-license.php)
  14. */
  15. #include "config.h"
  16. #include "math_compat.h"
  17. #include <assert.h>
  18. #include <errno.h>
  19. #include <limits.h>
  20. #include <math.h>
  21. #include <stddef.h>
  22. #include <stdio.h>
  23. #include <stdlib.h>
  24. #include <string.h>
  25. #include "debug.h"
  26. #include "json_inttypes.h"
  27. #include "json_object.h"
  28. #include "json_object_private.h"
  29. #include "json_tokener.h"
  30. #include "json_util.h"
  31. #include "printbuf.h"
  32. #include "strdup_compat.h"
  33. #ifdef HAVE_LOCALE_H
  34. #include <locale.h>
  35. #endif /* HAVE_LOCALE_H */
  36. #ifdef HAVE_XLOCALE_H
  37. #include <xlocale.h>
  38. #endif
  39. #ifdef HAVE_STRINGS_H
  40. #include <strings.h>
  41. #endif /* HAVE_STRINGS_H */
  42. #define jt_hexdigit(x) (((x) <= '9') ? (x) - '0' : ((x)&7) + 9)
  43. #if !HAVE_STRNCASECMP && defined(_MSC_VER)
  44. /* MSC has the version as _strnicmp */
  45. #define strncasecmp _strnicmp
  46. #elif !HAVE_STRNCASECMP
  47. #error You do not have strncasecmp on your system.
  48. #endif /* HAVE_STRNCASECMP */
  49. #if defined(_MSC_VER) && (_MSC_VER <= 1800)
  50. /* VS2013 doesn't know about "inline" */
  51. #define inline __inline
  52. #elif defined(AIX_CC)
  53. #define inline
  54. #endif
  55. /* The following helper functions are used to speed up parsing. They
  56. * are faster than their ctype counterparts because they assume that
  57. * the input is in ASCII and that the locale is set to "C". The
  58. * compiler will also inline these functions, providing an additional
  59. * speedup by saving on function calls.
  60. */
  61. static inline int is_ws_char(char c)
  62. {
  63. return c == ' '
  64. || c == '\t'
  65. || c == '\n'
  66. || c == '\r';
  67. }
  68. static inline int is_hex_char(char c)
  69. {
  70. return (c >= '0' && c <= '9')
  71. || (c >= 'A' && c <= 'F')
  72. || (c >= 'a' && c <= 'f');
  73. }
  74. /* Use C99 NAN by default; if not available, nan("") should work too. */
  75. #ifndef NAN
  76. #define NAN nan("")
  77. #endif /* !NAN */
  78. static const char json_null_str[] = "null";
  79. static const int json_null_str_len = sizeof(json_null_str) - 1;
  80. static const char json_inf_str[] = "Infinity";
  81. /* Swapped case "Infinity" to avoid need to call tolower() on input chars: */
  82. static const char json_inf_str_invert[] = "iNFINITY";
  83. static const unsigned int json_inf_str_len = sizeof(json_inf_str) - 1;
  84. static const char json_nan_str[] = "NaN";
  85. static const int json_nan_str_len = sizeof(json_nan_str) - 1;
  86. static const char json_true_str[] = "true";
  87. static const int json_true_str_len = sizeof(json_true_str) - 1;
  88. static const char json_false_str[] = "false";
  89. static const int json_false_str_len = sizeof(json_false_str) - 1;
  90. /* clang-format off */
  91. static const char *json_tokener_errors[] = {
  92. "success",
  93. "continue",
  94. "nesting too deep",
  95. "unexpected end of data",
  96. "unexpected character",
  97. "null expected",
  98. "boolean expected",
  99. "number expected",
  100. "array value separator ',' expected",
  101. "quoted object property name expected",
  102. "object property name separator ':' expected",
  103. "object value separator ',' expected",
  104. "invalid string sequence",
  105. "expected comment",
  106. "invalid utf-8 string",
  107. "out of memory",
  108. "buffer size overflow"
  109. };
  110. /* clang-format on */
/**
 * Validate that the input byte is part of a well-formed UTF-8 sequence
 * (used in strict mode). Returns an error if the input is not valid UTF-8.
 */
  115. static json_bool json_tokener_validate_utf8(const char c, unsigned int *nBytes);
  116. static int json_tokener_parse_double(const char *buf, int len, double *retval);
  117. const char *json_tokener_error_desc(enum json_tokener_error jerr)
  118. {
  119. int jerr_int = (int)jerr;
  120. if (jerr_int < 0 ||
  121. jerr_int >= (int)(sizeof(json_tokener_errors) / sizeof(json_tokener_errors[0])))
  122. return "Unknown error, "
  123. "invalid json_tokener_error value passed to json_tokener_error_desc()";
  124. return json_tokener_errors[jerr];
  125. }
/**
 * Return the error recorded by the most recent parse call on this tokener.
 * json_tokener_success means no error; json_tokener_continue means the
 * parser ran out of input and expects more data in a subsequent call.
 */
enum json_tokener_error json_tokener_get_error(struct json_tokener *tok)
{
	return tok->err;
}
  130. /* Stuff for decoding unicode sequences */
  131. #define IS_HIGH_SURROGATE(uc) (((uc)&0xFC00) == 0xD800)
  132. #define IS_LOW_SURROGATE(uc) (((uc)&0xFC00) == 0xDC00)
  133. #define DECODE_SURROGATE_PAIR(hi, lo) ((((hi)&0x3FF) << 10) + ((lo)&0x3FF) + 0x10000)
  134. static unsigned char utf8_replacement_char[3] = {0xEF, 0xBF, 0xBD};
  135. struct json_tokener *json_tokener_new_ex(int depth)
  136. {
  137. struct json_tokener *tok;
  138. tok = (struct json_tokener *)calloc(1, sizeof(struct json_tokener));
  139. if (!tok)
  140. return NULL;
  141. tok->stack = (struct json_tokener_srec *)calloc(depth, sizeof(struct json_tokener_srec));
  142. if (!tok->stack)
  143. {
  144. free(tok);
  145. return NULL;
  146. }
  147. tok->pb = printbuf_new();
  148. if (!tok->pb)
  149. {
  150. free(tok->stack);
  151. free(tok);
  152. return NULL;
  153. }
  154. tok->max_depth = depth;
  155. json_tokener_reset(tok);
  156. return tok;
  157. }
/**
 * Convenience constructor: allocate a tokener with the default maximum
 * nesting depth (JSON_TOKENER_DEFAULT_DEPTH). Returns NULL on failure.
 */
struct json_tokener *json_tokener_new(void)
{
	return json_tokener_new_ex(JSON_TOKENER_DEFAULT_DEPTH);
}
  162. void json_tokener_free(struct json_tokener *tok)
  163. {
  164. json_tokener_reset(tok);
  165. if (tok->pb)
  166. printbuf_free(tok->pb);
  167. free(tok->stack);
  168. free(tok);
  169. }
  170. static void json_tokener_reset_level(struct json_tokener *tok, int depth)
  171. {
  172. tok->stack[depth].state = json_tokener_state_eatws;
  173. tok->stack[depth].saved_state = json_tokener_state_start;
  174. json_object_put(tok->stack[depth].current);
  175. tok->stack[depth].current = NULL;
  176. free(tok->stack[depth].obj_field_name);
  177. tok->stack[depth].obj_field_name = NULL;
  178. }
  179. void json_tokener_reset(struct json_tokener *tok)
  180. {
  181. int i;
  182. if (!tok)
  183. return;
  184. for (i = tok->depth; i >= 0; i--)
  185. json_tokener_reset_level(tok, i);
  186. tok->depth = 0;
  187. tok->err = json_tokener_success;
  188. }
  189. struct json_object *json_tokener_parse(const char *str)
  190. {
  191. enum json_tokener_error jerr_ignored;
  192. struct json_object *obj;
  193. obj = json_tokener_parse_verbose(str, &jerr_ignored);
  194. return obj;
  195. }
  196. struct json_object *json_tokener_parse_verbose(const char *str, enum json_tokener_error *error)
  197. {
  198. struct json_tokener *tok;
  199. struct json_object *obj;
  200. tok = json_tokener_new();
  201. if (!tok)
  202. return NULL;
  203. obj = json_tokener_parse_ex(tok, str, -1);
  204. *error = tok->err;
  205. if (tok->err != json_tokener_success
  206. #if 0
  207. /* This would be a more sensible default, and cause parsing
  208. * things like "null123" to fail when the caller can't know
  209. * where the parsing left off, but starting to fail would
  210. * be a notable behaviour change. Save for a 1.0 release.
  211. */
  212. || json_tokener_get_parse_end(tok) != strlen(str)
  213. #endif
  214. )
  215. {
  216. if (obj != NULL)
  217. json_object_put(obj);
  218. obj = NULL;
  219. }
  220. json_tokener_free(tok);
  221. return obj;
  222. }
  223. #define state tok->stack[tok->depth].state
  224. #define saved_state tok->stack[tok->depth].saved_state
  225. #define current tok->stack[tok->depth].current
  226. #define obj_field_name tok->stack[tok->depth].obj_field_name
  227. /* Optimization:
  228. * json_tokener_parse_ex() consumed a lot of CPU in its main loop,
  229. * iterating character-by character. A large performance boost is
  230. * achieved by using tighter loops to locally handle units such as
  231. * comments and strings. Loops that handle an entire token within
  232. * their scope also gather entire strings and pass them to
  233. * printbuf_memappend() in a single call, rather than calling
  234. * printbuf_memappend() one char at a time.
  235. *
  236. * PEEK_CHAR() and ADVANCE_CHAR() macros are used for code that is
  237. * common to both the main loop and the tighter loops.
  238. */
  239. /* PEEK_CHAR(dest, tok) macro:
  240. * Peeks at the current char and stores it in dest.
  241. * Returns 1 on success, sets tok->err and returns 0 if no more chars.
  242. * Implicit inputs: str, len, nBytesp vars
  243. */
  244. #define PEEK_CHAR(dest, tok) \
  245. (((tok)->char_offset == len) \
  246. ? (((tok)->depth == 0 && state == json_tokener_state_eatws && \
  247. saved_state == json_tokener_state_finish) \
  248. ? (((tok)->err = json_tokener_success), 0) \
  249. : (((tok)->err = json_tokener_continue), 0)) \
  250. : (((tok->flags & JSON_TOKENER_VALIDATE_UTF8) && \
  251. (!json_tokener_validate_utf8(*str, nBytesp))) \
  252. ? ((tok->err = json_tokener_error_parse_utf8_string), 0) \
  253. : (((dest) = *str), 1)))
  254. /* ADVANCE_CHAR() macro:
  255. * Increments str & tok->char_offset.
  256. * For convenience of existing conditionals, returns the old value of c (0 on eof).
  257. * Implicit inputs: c var
  258. */
  259. #define ADVANCE_CHAR(str, tok) (++(str), ((tok)->char_offset)++, c)
  260. /* printbuf_memappend_checked(p, s, l) macro:
  261. * Add string s of length l to printbuffer p.
  262. * If operation fails abort parse operation with memory error.
  263. */
  264. #define printbuf_memappend_checked(p, s, l) \
  265. do { \
  266. if (printbuf_memappend((p), (s), (l)) < 0) \
  267. { \
  268. tok->err = json_tokener_error_memory; \
  269. goto out; \
  270. } \
  271. } while (0)
  272. /* End optimization macro defs */
  273. struct json_object *json_tokener_parse_ex(struct json_tokener *tok, const char *str, int len)
  274. {
  275. struct json_object *obj = NULL;
  276. char c = '\1';
  277. unsigned int nBytes = 0;
  278. unsigned int *nBytesp = &nBytes;
  279. #ifdef HAVE_USELOCALE
  280. locale_t oldlocale = uselocale(NULL);
  281. locale_t newloc;
  282. #elif defined(HAVE_SETLOCALE)
  283. char *oldlocale = NULL;
  284. #endif
  285. tok->char_offset = 0;
  286. tok->err = json_tokener_success;
  287. /* this interface is presently not 64-bit clean due to the int len argument
  288. * and the internal printbuf interface that takes 32-bit int len arguments
  289. * so the function limits the maximum string size to INT32_MAX (2GB).
  290. * If the function is called with len == -1 then strlen is called to check
  291. * the string length is less than INT32_MAX (2GB)
  292. */
  293. if ((len < -1) || (len == -1 && strlen(str) > INT32_MAX))
  294. {
  295. tok->err = json_tokener_error_size;
  296. return NULL;
  297. }
  298. #ifdef HAVE_USELOCALE
  299. {
  300. locale_t duploc = duplocale(oldlocale);
  301. newloc = newlocale(LC_NUMERIC_MASK, "C", duploc);
  302. if (newloc == NULL)
  303. {
  304. freelocale(duploc);
  305. return NULL;
  306. }
  307. #ifdef NEWLOCALE_NEEDS_FREELOCALE
  308. // Older versions of FreeBSD (<12.4) don't free the locale
  309. // passed to newlocale(), so do it here
  310. freelocale(duploc);
  311. #endif
  312. uselocale(newloc);
  313. }
  314. #elif defined(HAVE_SETLOCALE)
  315. {
  316. char *tmplocale;
  317. tmplocale = setlocale(LC_NUMERIC, NULL);
  318. if (tmplocale)
  319. {
  320. oldlocale = strdup(tmplocale);
  321. if (oldlocale == NULL)
  322. return NULL;
  323. }
  324. setlocale(LC_NUMERIC, "C");
  325. }
  326. #endif
  327. while (PEEK_CHAR(c, tok)) // Note: c might be '\0' !
  328. {
  329. redo_char:
  330. switch (state)
  331. {
  332. case json_tokener_state_eatws:
  333. /* Advance until we change state */
  334. while (is_ws_char(c))
  335. {
  336. if ((!ADVANCE_CHAR(str, tok)) || (!PEEK_CHAR(c, tok)))
  337. goto out;
  338. }
  339. if (c == '/' && !(tok->flags & JSON_TOKENER_STRICT))
  340. {
  341. printbuf_reset(tok->pb);
  342. printbuf_memappend_checked(tok->pb, &c, 1);
  343. state = json_tokener_state_comment_start;
  344. }
  345. else
  346. {
  347. state = saved_state;
  348. goto redo_char;
  349. }
  350. break;
  351. case json_tokener_state_start:
  352. switch (c)
  353. {
  354. case '{':
  355. state = json_tokener_state_eatws;
  356. saved_state = json_tokener_state_object_field_start;
  357. current = json_object_new_object();
  358. if (current == NULL)
  359. {
  360. tok->err = json_tokener_error_memory;
  361. goto out;
  362. }
  363. break;
  364. case '[':
  365. state = json_tokener_state_eatws;
  366. saved_state = json_tokener_state_array;
  367. current = json_object_new_array();
  368. if (current == NULL)
  369. {
  370. tok->err = json_tokener_error_memory;
  371. goto out;
  372. }
  373. break;
  374. case 'I':
  375. case 'i':
  376. state = json_tokener_state_inf;
  377. printbuf_reset(tok->pb);
  378. tok->st_pos = 0;
  379. goto redo_char;
  380. case 'N':
  381. case 'n':
  382. state = json_tokener_state_null; // or NaN
  383. printbuf_reset(tok->pb);
  384. tok->st_pos = 0;
  385. goto redo_char;
  386. case '\'':
  387. if (tok->flags & JSON_TOKENER_STRICT)
  388. {
  389. /* in STRICT mode only double-quote are allowed */
  390. tok->err = json_tokener_error_parse_unexpected;
  391. goto out;
  392. }
  393. /* FALLTHRU */
  394. case '"':
  395. state = json_tokener_state_string;
  396. printbuf_reset(tok->pb);
  397. tok->quote_char = c;
  398. break;
  399. case 'T':
  400. case 't':
  401. case 'F':
  402. case 'f':
  403. state = json_tokener_state_boolean;
  404. printbuf_reset(tok->pb);
  405. tok->st_pos = 0;
  406. goto redo_char;
  407. case '0':
  408. case '1':
  409. case '2':
  410. case '3':
  411. case '4':
  412. case '5':
  413. case '6':
  414. case '7':
  415. case '8':
  416. case '9':
  417. case '-':
  418. state = json_tokener_state_number;
  419. printbuf_reset(tok->pb);
  420. tok->is_double = 0;
  421. goto redo_char;
  422. default: tok->err = json_tokener_error_parse_unexpected; goto out;
  423. }
  424. break;
  425. case json_tokener_state_finish:
  426. if (tok->depth == 0)
  427. goto out;
  428. obj = json_object_get(current);
  429. json_tokener_reset_level(tok, tok->depth);
  430. tok->depth--;
  431. goto redo_char;
  432. case json_tokener_state_inf: /* aka starts with 'i' (or 'I', or "-i", or "-I") */
  433. {
  434. /* If we were guaranteed to have len set, then we could (usually) handle
  435. * the entire "Infinity" check in a single strncmp (strncasecmp), but
  436. * since len might be -1 (i.e. "read until \0"), we need to check it
  437. * a character at a time.
  438. * Trying to handle it both ways would make this code considerably more
  439. * complicated with likely little performance benefit.
  440. */
  441. int is_negative = 0;
  442. /* Note: tok->st_pos must be 0 when state is set to json_tokener_state_inf */
  443. while (tok->st_pos < (int)json_inf_str_len)
  444. {
  445. char inf_char = *str;
  446. if (inf_char != json_inf_str[tok->st_pos] &&
  447. ((tok->flags & JSON_TOKENER_STRICT) ||
  448. inf_char != json_inf_str_invert[tok->st_pos])
  449. )
  450. {
  451. tok->err = json_tokener_error_parse_unexpected;
  452. goto out;
  453. }
  454. tok->st_pos++;
  455. (void)ADVANCE_CHAR(str, tok);
  456. if (!PEEK_CHAR(c, tok))
  457. {
  458. /* out of input chars, for now at least */
  459. goto out;
  460. }
  461. }
  462. /* We checked the full length of "Infinity", so create the object.
  463. * When handling -Infinity, the number parsing code will have dropped
  464. * the "-" into tok->pb for us, so check it now.
  465. */
  466. if (printbuf_length(tok->pb) > 0 && *(tok->pb->buf) == '-')
  467. {
  468. is_negative = 1;
  469. }
  470. current = json_object_new_double(is_negative ? -INFINITY : INFINITY);
  471. if (current == NULL)
  472. {
  473. tok->err = json_tokener_error_memory;
  474. goto out;
  475. }
  476. saved_state = json_tokener_state_finish;
  477. state = json_tokener_state_eatws;
  478. goto redo_char;
  479. }
  480. break;
  481. case json_tokener_state_null: /* aka starts with 'n' */
  482. {
  483. int size;
  484. int size_nan;
  485. printbuf_memappend_checked(tok->pb, &c, 1);
  486. size = json_min(tok->st_pos + 1, json_null_str_len);
  487. size_nan = json_min(tok->st_pos + 1, json_nan_str_len);
  488. if ((!(tok->flags & JSON_TOKENER_STRICT) &&
  489. strncasecmp(json_null_str, tok->pb->buf, size) == 0) ||
  490. (strncmp(json_null_str, tok->pb->buf, size) == 0))
  491. {
  492. if (tok->st_pos == json_null_str_len)
  493. {
  494. current = NULL;
  495. saved_state = json_tokener_state_finish;
  496. state = json_tokener_state_eatws;
  497. goto redo_char;
  498. }
  499. }
  500. else if ((!(tok->flags & JSON_TOKENER_STRICT) &&
  501. strncasecmp(json_nan_str, tok->pb->buf, size_nan) == 0) ||
  502. (strncmp(json_nan_str, tok->pb->buf, size_nan) == 0))
  503. {
  504. if (tok->st_pos == json_nan_str_len)
  505. {
  506. current = json_object_new_double(NAN);
  507. if (current == NULL)
  508. {
  509. tok->err = json_tokener_error_memory;
  510. goto out;
  511. }
  512. saved_state = json_tokener_state_finish;
  513. state = json_tokener_state_eatws;
  514. goto redo_char;
  515. }
  516. }
  517. else
  518. {
  519. tok->err = json_tokener_error_parse_null;
  520. goto out;
  521. }
  522. tok->st_pos++;
  523. }
  524. break;
  525. case json_tokener_state_comment_start:
  526. if (c == '*')
  527. {
  528. state = json_tokener_state_comment;
  529. }
  530. else if (c == '/')
  531. {
  532. state = json_tokener_state_comment_eol;
  533. }
  534. else
  535. {
  536. tok->err = json_tokener_error_parse_comment;
  537. goto out;
  538. }
  539. printbuf_memappend_checked(tok->pb, &c, 1);
  540. break;
  541. case json_tokener_state_comment:
  542. {
  543. /* Advance until we change state */
  544. const char *case_start = str;
  545. while (c != '*')
  546. {
  547. if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
  548. {
  549. printbuf_memappend_checked(tok->pb, case_start,
  550. str - case_start);
  551. goto out;
  552. }
  553. }
  554. printbuf_memappend_checked(tok->pb, case_start, 1 + str - case_start);
  555. state = json_tokener_state_comment_end;
  556. }
  557. break;
  558. case json_tokener_state_comment_eol:
  559. {
  560. /* Advance until we change state */
  561. const char *case_start = str;
  562. while (c != '\n')
  563. {
  564. if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
  565. {
  566. printbuf_memappend_checked(tok->pb, case_start,
  567. str - case_start);
  568. goto out;
  569. }
  570. }
  571. printbuf_memappend_checked(tok->pb, case_start, str - case_start);
  572. MC_DEBUG("json_tokener_comment: %s\n", tok->pb->buf);
  573. state = json_tokener_state_eatws;
  574. }
  575. break;
  576. case json_tokener_state_comment_end:
  577. printbuf_memappend_checked(tok->pb, &c, 1);
  578. if (c == '/')
  579. {
  580. MC_DEBUG("json_tokener_comment: %s\n", tok->pb->buf);
  581. state = json_tokener_state_eatws;
  582. }
  583. else
  584. {
  585. state = json_tokener_state_comment;
  586. }
  587. break;
  588. case json_tokener_state_string:
  589. {
  590. /* Advance until we change state */
  591. const char *case_start = str;
  592. while (1)
  593. {
  594. if (c == tok->quote_char)
  595. {
  596. printbuf_memappend_checked(tok->pb, case_start,
  597. str - case_start);
  598. current =
  599. json_object_new_string_len(tok->pb->buf, tok->pb->bpos);
  600. if (current == NULL)
  601. {
  602. tok->err = json_tokener_error_memory;
  603. goto out;
  604. }
  605. saved_state = json_tokener_state_finish;
  606. state = json_tokener_state_eatws;
  607. break;
  608. }
  609. else if (c == '\\')
  610. {
  611. printbuf_memappend_checked(tok->pb, case_start,
  612. str - case_start);
  613. saved_state = json_tokener_state_string;
  614. state = json_tokener_state_string_escape;
  615. break;
  616. }
  617. if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
  618. {
  619. printbuf_memappend_checked(tok->pb, case_start,
  620. str - case_start);
  621. goto out;
  622. }
  623. }
  624. }
  625. break;
  626. case json_tokener_state_string_escape:
  627. switch (c)
  628. {
  629. case '"':
  630. case '\\':
  631. case '/':
  632. printbuf_memappend_checked(tok->pb, &c, 1);
  633. state = saved_state;
  634. break;
  635. case 'b':
  636. case 'n':
  637. case 'r':
  638. case 't':
  639. case 'f':
  640. if (c == 'b')
  641. printbuf_memappend_checked(tok->pb, "\b", 1);
  642. else if (c == 'n')
  643. printbuf_memappend_checked(tok->pb, "\n", 1);
  644. else if (c == 'r')
  645. printbuf_memappend_checked(tok->pb, "\r", 1);
  646. else if (c == 't')
  647. printbuf_memappend_checked(tok->pb, "\t", 1);
  648. else if (c == 'f')
  649. printbuf_memappend_checked(tok->pb, "\f", 1);
  650. state = saved_state;
  651. break;
  652. case 'u':
  653. tok->ucs_char = 0;
  654. tok->st_pos = 0;
  655. state = json_tokener_state_escape_unicode;
  656. break;
  657. default: tok->err = json_tokener_error_parse_string; goto out;
  658. }
  659. break;
  660. // ===================================================
  661. case json_tokener_state_escape_unicode:
  662. {
  663. /* Handle a 4-byte \uNNNN sequence, or two sequences if a surrogate pair */
  664. while (1)
  665. {
  666. if (!c || !is_hex_char(c))
  667. {
  668. tok->err = json_tokener_error_parse_string;
  669. goto out;
  670. }
  671. tok->ucs_char |=
  672. ((unsigned int)jt_hexdigit(c) << ((3 - tok->st_pos) * 4));
  673. tok->st_pos++;
  674. if (tok->st_pos >= 4)
  675. break;
  676. (void)ADVANCE_CHAR(str, tok);
  677. if (!PEEK_CHAR(c, tok))
  678. {
  679. /*
  680. * We're out of characters in the current call to
  681. * json_tokener_parse(), but a subsequent call might
  682. * provide us with more, so leave our current state
  683. * as-is (including tok->high_surrogate) and return.
  684. */
  685. goto out;
  686. }
  687. }
  688. tok->st_pos = 0;
  689. /* Now, we have a full \uNNNN sequence in tok->ucs_char */
  690. /* If the *previous* sequence was a high surrogate ... */
  691. if (tok->high_surrogate)
  692. {
  693. if (IS_LOW_SURROGATE(tok->ucs_char))
  694. {
  695. /* Recalculate the ucs_char, then fall thru to process normally */
  696. tok->ucs_char = DECODE_SURROGATE_PAIR(tok->high_surrogate,
  697. tok->ucs_char);
  698. }
  699. else
  700. {
  701. /* High surrogate was not followed by a low surrogate
  702. * Replace the high and process the rest normally
  703. */
  704. printbuf_memappend_checked(tok->pb,
  705. (char *)utf8_replacement_char, 3);
  706. }
  707. tok->high_surrogate = 0;
  708. }
  709. if (tok->ucs_char < 0x80)
  710. {
  711. unsigned char unescaped_utf[1];
  712. unescaped_utf[0] = tok->ucs_char;
  713. printbuf_memappend_checked(tok->pb, (char *)unescaped_utf, 1);
  714. }
  715. else if (tok->ucs_char < 0x800)
  716. {
  717. unsigned char unescaped_utf[2];
  718. unescaped_utf[0] = 0xc0 | (tok->ucs_char >> 6);
  719. unescaped_utf[1] = 0x80 | (tok->ucs_char & 0x3f);
  720. printbuf_memappend_checked(tok->pb, (char *)unescaped_utf, 2);
  721. }
  722. else if (IS_HIGH_SURROGATE(tok->ucs_char))
  723. {
  724. /*
  725. * The next two characters should be \u, HOWEVER,
  726. * we can't simply peek ahead here, because the
  727. * characters we need might not be passed to us
  728. * until a subsequent call to json_tokener_parse.
  729. * Instead, transition through a couple of states.
  730. * (now):
  731. * _escape_unicode => _unicode_need_escape
  732. * (see a '\\' char):
  733. * _unicode_need_escape => _unicode_need_u
  734. * (see a 'u' char):
  735. * _unicode_need_u => _escape_unicode
  736. * ...and we'll end up back around here.
  737. */
  738. tok->high_surrogate = tok->ucs_char;
  739. tok->ucs_char = 0;
  740. state = json_tokener_state_escape_unicode_need_escape;
  741. break;
  742. }
  743. else if (IS_LOW_SURROGATE(tok->ucs_char))
  744. {
  745. /* Got a low surrogate not preceded by a high */
  746. printbuf_memappend_checked(tok->pb, (char *)utf8_replacement_char, 3);
  747. }
  748. else if (tok->ucs_char < 0x10000)
  749. {
  750. unsigned char unescaped_utf[3];
  751. unescaped_utf[0] = 0xe0 | (tok->ucs_char >> 12);
  752. unescaped_utf[1] = 0x80 | ((tok->ucs_char >> 6) & 0x3f);
  753. unescaped_utf[2] = 0x80 | (tok->ucs_char & 0x3f);
  754. printbuf_memappend_checked(tok->pb, (char *)unescaped_utf, 3);
  755. }
  756. else if (tok->ucs_char < 0x110000)
  757. {
  758. unsigned char unescaped_utf[4];
  759. unescaped_utf[0] = 0xf0 | ((tok->ucs_char >> 18) & 0x07);
  760. unescaped_utf[1] = 0x80 | ((tok->ucs_char >> 12) & 0x3f);
  761. unescaped_utf[2] = 0x80 | ((tok->ucs_char >> 6) & 0x3f);
  762. unescaped_utf[3] = 0x80 | (tok->ucs_char & 0x3f);
  763. printbuf_memappend_checked(tok->pb, (char *)unescaped_utf, 4);
  764. }
  765. else
  766. {
  767. /* Don't know what we got--insert the replacement char */
  768. printbuf_memappend_checked(tok->pb, (char *)utf8_replacement_char, 3);
  769. }
  770. state = saved_state; // i.e. _state_string or _state_object_field
  771. }
  772. break;
  773. case json_tokener_state_escape_unicode_need_escape:
  774. // We get here after processing a high_surrogate
  775. // require a '\\' char
  776. if (!c || c != '\\')
  777. {
  778. /* Got a high surrogate without another sequence following
  779. * it. Put a replacement char in for the high surrogate
  780. * and pop back up to _state_string or _state_object_field.
  781. */
  782. printbuf_memappend_checked(tok->pb, (char *)utf8_replacement_char, 3);
  783. tok->high_surrogate = 0;
  784. tok->ucs_char = 0;
  785. tok->st_pos = 0;
  786. state = saved_state;
  787. goto redo_char;
  788. }
  789. state = json_tokener_state_escape_unicode_need_u;
  790. break;
  791. case json_tokener_state_escape_unicode_need_u:
  792. /* We already had a \ char, check that it's \u */
  793. if (!c || c != 'u')
  794. {
  795. /* Got a high surrogate with some non-unicode escape
  796. * sequence following it.
  797. * Put a replacement char in for the high surrogate
  798. * and handle the escape sequence normally.
  799. */
  800. printbuf_memappend_checked(tok->pb, (char *)utf8_replacement_char, 3);
  801. tok->high_surrogate = 0;
  802. tok->ucs_char = 0;
  803. tok->st_pos = 0;
  804. state = json_tokener_state_string_escape;
  805. goto redo_char;
  806. }
  807. state = json_tokener_state_escape_unicode;
  808. break;
  809. // ===================================================
  810. case json_tokener_state_boolean:
  811. {
  812. int size1, size2;
  813. printbuf_memappend_checked(tok->pb, &c, 1);
  814. size1 = json_min(tok->st_pos + 1, json_true_str_len);
  815. size2 = json_min(tok->st_pos + 1, json_false_str_len);
  816. if ((!(tok->flags & JSON_TOKENER_STRICT) &&
  817. strncasecmp(json_true_str, tok->pb->buf, size1) == 0) ||
  818. (strncmp(json_true_str, tok->pb->buf, size1) == 0))
  819. {
  820. if (tok->st_pos == json_true_str_len)
  821. {
  822. current = json_object_new_boolean(1);
  823. if (current == NULL)
  824. {
  825. tok->err = json_tokener_error_memory;
  826. goto out;
  827. }
  828. saved_state = json_tokener_state_finish;
  829. state = json_tokener_state_eatws;
  830. goto redo_char;
  831. }
  832. }
  833. else if ((!(tok->flags & JSON_TOKENER_STRICT) &&
  834. strncasecmp(json_false_str, tok->pb->buf, size2) == 0) ||
  835. (strncmp(json_false_str, tok->pb->buf, size2) == 0))
  836. {
  837. if (tok->st_pos == json_false_str_len)
  838. {
  839. current = json_object_new_boolean(0);
  840. if (current == NULL)
  841. {
  842. tok->err = json_tokener_error_memory;
  843. goto out;
  844. }
  845. saved_state = json_tokener_state_finish;
  846. state = json_tokener_state_eatws;
  847. goto redo_char;
  848. }
  849. }
  850. else
  851. {
  852. tok->err = json_tokener_error_parse_boolean;
  853. goto out;
  854. }
  855. tok->st_pos++;
  856. }
  857. break;
  858. case json_tokener_state_number:
  859. {
  860. /* Advance until we change state */
  861. const char *case_start = str;
  862. int case_len = 0;
  863. int is_exponent = 0;
  864. int neg_sign_ok = 1;
  865. int pos_sign_ok = 0;
  866. if (printbuf_length(tok->pb) > 0)
  867. {
  868. /* We don't save all state from the previous incremental parse
  869. so we need to re-generate it based on the saved string so far.
  870. */
  871. char *e_loc = strchr(tok->pb->buf, 'e');
  872. if (!e_loc)
  873. e_loc = strchr(tok->pb->buf, 'E');
  874. if (e_loc)
  875. {
  876. char *last_saved_char =
  877. &tok->pb->buf[printbuf_length(tok->pb) - 1];
  878. is_exponent = 1;
  879. pos_sign_ok = neg_sign_ok = 1;
  880. /* If the "e" isn't at the end, we can't start with a '-' */
  881. if (e_loc != last_saved_char)
  882. {
  883. neg_sign_ok = 0;
  884. pos_sign_ok = 0;
  885. }
  886. // else leave it set to 1, i.e. start of the new input
  887. }
  888. }
  889. while (c && ((c >= '0' && c <= '9') ||
  890. (!is_exponent && (c == 'e' || c == 'E')) ||
  891. (neg_sign_ok && c == '-') || (pos_sign_ok && c == '+') ||
  892. (!tok->is_double && c == '.')))
  893. {
  894. pos_sign_ok = neg_sign_ok = 0;
  895. ++case_len;
  896. /* non-digit characters checks */
  897. /* note: since the main loop condition to get here was
  898. * an input starting with 0-9 or '-', we are
  899. * protected from input starting with '.' or
  900. * e/E.
  901. */
  902. switch (c)
  903. {
  904. case '.':
  905. tok->is_double = 1;
  906. pos_sign_ok = 1;
  907. neg_sign_ok = 1;
  908. break;
  909. case 'e': /* FALLTHRU */
  910. case 'E':
  911. is_exponent = 1;
  912. tok->is_double = 1;
  913. /* the exponent part can begin with a negative sign */
  914. pos_sign_ok = neg_sign_ok = 1;
  915. break;
  916. default: break;
  917. }
  918. if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
  919. {
  920. printbuf_memappend_checked(tok->pb, case_start, case_len);
  921. goto out;
  922. }
  923. }
  924. /*
  925. Now we know c isn't a valid number char, but check whether
  926. it might have been intended to be, and return a potentially
  927. more understandable error right away.
  928. However, if we're at the top-level, use the number as-is
  929. because c can be part of a new object to parse on the
  930. next call to json_tokener_parse().
  931. */
  932. if (tok->depth > 0 && c != ',' && c != ']' && c != '}' && c != '/' &&
  933. c != 'I' && c != 'i' && !is_ws_char(c))
  934. {
  935. tok->err = json_tokener_error_parse_number;
  936. goto out;
  937. }
  938. if (case_len > 0)
  939. printbuf_memappend_checked(tok->pb, case_start, case_len);
  940. // Check for -Infinity
  941. if (tok->pb->buf[0] == '-' && case_len <= 1 && (c == 'i' || c == 'I'))
  942. {
  943. state = json_tokener_state_inf;
  944. tok->st_pos = 0;
  945. goto redo_char;
  946. }
  947. if (tok->is_double && !(tok->flags & JSON_TOKENER_STRICT))
  948. {
  949. /* Trim some chars off the end, to allow things
  950. like "123e+" to parse ok. */
  951. while (printbuf_length(tok->pb) > 1)
  952. {
  953. char last_char = tok->pb->buf[printbuf_length(tok->pb) - 1];
  954. if (last_char != 'e' && last_char != 'E' &&
  955. last_char != '-' && last_char != '+')
  956. {
  957. break;
  958. }
  959. tok->pb->buf[printbuf_length(tok->pb) - 1] = '\0';
  960. printbuf_length(tok->pb)--;
  961. }
  962. }
  963. }
  964. {
  965. int64_t num64;
  966. uint64_t numuint64;
  967. double numd;
  968. if (!tok->is_double && tok->pb->buf[0] == '-' &&
  969. json_parse_int64(tok->pb->buf, &num64) == 0)
  970. {
  971. if (errno == ERANGE && (tok->flags & JSON_TOKENER_STRICT))
  972. {
  973. tok->err = json_tokener_error_parse_number;
  974. goto out;
  975. }
  976. current = json_object_new_int64(num64);
  977. if (current == NULL)
  978. {
  979. tok->err = json_tokener_error_memory;
  980. goto out;
  981. }
  982. }
  983. else if (!tok->is_double && tok->pb->buf[0] != '-' &&
  984. json_parse_uint64(tok->pb->buf, &numuint64) == 0)
  985. {
  986. if (errno == ERANGE && (tok->flags & JSON_TOKENER_STRICT))
  987. {
  988. tok->err = json_tokener_error_parse_number;
  989. goto out;
  990. }
  991. if (numuint64 && tok->pb->buf[0] == '0' &&
  992. (tok->flags & JSON_TOKENER_STRICT))
  993. {
  994. tok->err = json_tokener_error_parse_number;
  995. goto out;
  996. }
  997. if (numuint64 <= INT64_MAX)
  998. {
  999. num64 = (uint64_t)numuint64;
  1000. current = json_object_new_int64(num64);
  1001. if (current == NULL)
  1002. {
  1003. tok->err = json_tokener_error_memory;
  1004. goto out;
  1005. }
  1006. }
  1007. else
  1008. {
  1009. current = json_object_new_uint64(numuint64);
  1010. if (current == NULL)
  1011. {
  1012. tok->err = json_tokener_error_memory;
  1013. goto out;
  1014. }
  1015. }
  1016. }
  1017. else if (tok->is_double &&
  1018. json_tokener_parse_double(
  1019. tok->pb->buf, printbuf_length(tok->pb), &numd) == 0)
  1020. {
  1021. current = json_object_new_double_s(numd, tok->pb->buf);
  1022. if (current == NULL)
  1023. {
  1024. tok->err = json_tokener_error_memory;
  1025. goto out;
  1026. }
  1027. }
  1028. else
  1029. {
  1030. tok->err = json_tokener_error_parse_number;
  1031. goto out;
  1032. }
  1033. saved_state = json_tokener_state_finish;
  1034. state = json_tokener_state_eatws;
  1035. goto redo_char;
  1036. }
  1037. break;
  1038. case json_tokener_state_array_after_sep:
  1039. case json_tokener_state_array:
  1040. if (c == ']')
  1041. {
  1042. // Minimize memory usage; assume parsed objs are unlikely to be changed
  1043. json_object_array_shrink(current, 0);
  1044. if (state == json_tokener_state_array_after_sep &&
  1045. (tok->flags & JSON_TOKENER_STRICT))
  1046. {
  1047. tok->err = json_tokener_error_parse_unexpected;
  1048. goto out;
  1049. }
  1050. saved_state = json_tokener_state_finish;
  1051. state = json_tokener_state_eatws;
  1052. }
  1053. else
  1054. {
  1055. if (tok->depth >= tok->max_depth - 1)
  1056. {
  1057. tok->err = json_tokener_error_depth;
  1058. goto out;
  1059. }
  1060. state = json_tokener_state_array_add;
  1061. tok->depth++;
  1062. json_tokener_reset_level(tok, tok->depth);
  1063. goto redo_char;
  1064. }
  1065. break;
  1066. case json_tokener_state_array_add:
  1067. if (json_object_array_add(current, obj) != 0)
  1068. {
  1069. tok->err = json_tokener_error_memory;
  1070. goto out;
  1071. }
  1072. saved_state = json_tokener_state_array_sep;
  1073. state = json_tokener_state_eatws;
  1074. goto redo_char;
  1075. case json_tokener_state_array_sep:
  1076. if (c == ']')
  1077. {
  1078. // Minimize memory usage; assume parsed objs are unlikely to be changed
  1079. json_object_array_shrink(current, 0);
  1080. saved_state = json_tokener_state_finish;
  1081. state = json_tokener_state_eatws;
  1082. }
  1083. else if (c == ',')
  1084. {
  1085. saved_state = json_tokener_state_array_after_sep;
  1086. state = json_tokener_state_eatws;
  1087. }
  1088. else
  1089. {
  1090. tok->err = json_tokener_error_parse_array;
  1091. goto out;
  1092. }
  1093. break;
  1094. case json_tokener_state_object_field_start:
  1095. case json_tokener_state_object_field_start_after_sep:
  1096. if (c == '}')
  1097. {
  1098. if (state == json_tokener_state_object_field_start_after_sep &&
  1099. (tok->flags & JSON_TOKENER_STRICT))
  1100. {
  1101. tok->err = json_tokener_error_parse_unexpected;
  1102. goto out;
  1103. }
  1104. saved_state = json_tokener_state_finish;
  1105. state = json_tokener_state_eatws;
  1106. }
  1107. else if (c == '"' || c == '\'')
  1108. {
  1109. tok->quote_char = c;
  1110. printbuf_reset(tok->pb);
  1111. state = json_tokener_state_object_field;
  1112. }
  1113. else
  1114. {
  1115. tok->err = json_tokener_error_parse_object_key_name;
  1116. goto out;
  1117. }
  1118. break;
  1119. case json_tokener_state_object_field:
  1120. {
  1121. /* Advance until we change state */
  1122. const char *case_start = str;
  1123. while (1)
  1124. {
  1125. if (c == tok->quote_char)
  1126. {
  1127. printbuf_memappend_checked(tok->pb, case_start,
  1128. str - case_start);
  1129. obj_field_name = strdup(tok->pb->buf);
  1130. if (obj_field_name == NULL)
  1131. {
  1132. tok->err = json_tokener_error_memory;
  1133. goto out;
  1134. }
  1135. saved_state = json_tokener_state_object_field_end;
  1136. state = json_tokener_state_eatws;
  1137. break;
  1138. }
  1139. else if (c == '\\')
  1140. {
  1141. printbuf_memappend_checked(tok->pb, case_start,
  1142. str - case_start);
  1143. saved_state = json_tokener_state_object_field;
  1144. state = json_tokener_state_string_escape;
  1145. break;
  1146. }
  1147. if (!ADVANCE_CHAR(str, tok) || !PEEK_CHAR(c, tok))
  1148. {
  1149. printbuf_memappend_checked(tok->pb, case_start,
  1150. str - case_start);
  1151. goto out;
  1152. }
  1153. }
  1154. }
  1155. break;
  1156. case json_tokener_state_object_field_end:
  1157. if (c == ':')
  1158. {
  1159. saved_state = json_tokener_state_object_value;
  1160. state = json_tokener_state_eatws;
  1161. }
  1162. else
  1163. {
  1164. tok->err = json_tokener_error_parse_object_key_sep;
  1165. goto out;
  1166. }
  1167. break;
  1168. case json_tokener_state_object_value:
  1169. if (tok->depth >= tok->max_depth - 1)
  1170. {
  1171. tok->err = json_tokener_error_depth;
  1172. goto out;
  1173. }
  1174. state = json_tokener_state_object_value_add;
  1175. tok->depth++;
  1176. json_tokener_reset_level(tok, tok->depth);
  1177. goto redo_char;
  1178. case json_tokener_state_object_value_add:
  1179. json_object_object_add(current, obj_field_name, obj);
  1180. free(obj_field_name);
  1181. obj_field_name = NULL;
  1182. saved_state = json_tokener_state_object_sep;
  1183. state = json_tokener_state_eatws;
  1184. goto redo_char;
  1185. case json_tokener_state_object_sep:
  1186. /* { */
  1187. if (c == '}')
  1188. {
  1189. saved_state = json_tokener_state_finish;
  1190. state = json_tokener_state_eatws;
  1191. }
  1192. else if (c == ',')
  1193. {
  1194. saved_state = json_tokener_state_object_field_start_after_sep;
  1195. state = json_tokener_state_eatws;
  1196. }
  1197. else
  1198. {
  1199. tok->err = json_tokener_error_parse_object_value_sep;
  1200. goto out;
  1201. }
  1202. break;
  1203. }
  1204. (void)ADVANCE_CHAR(str, tok);
  1205. if (!c) // This is the char *before* advancing
  1206. break;
  1207. } /* while(PEEK_CHAR) */
  1208. out:
  1209. if ((tok->flags & JSON_TOKENER_VALIDATE_UTF8) && (nBytes != 0))
  1210. {
  1211. tok->err = json_tokener_error_parse_utf8_string;
  1212. }
  1213. if (c && (state == json_tokener_state_finish) && (tok->depth == 0) &&
  1214. (tok->flags & (JSON_TOKENER_STRICT | JSON_TOKENER_ALLOW_TRAILING_CHARS)) ==
  1215. JSON_TOKENER_STRICT)
  1216. {
  1217. /* unexpected char after JSON data */
  1218. tok->err = json_tokener_error_parse_unexpected;
  1219. }
  1220. if (!c)
  1221. {
  1222. /* We hit an eof char (0) */
  1223. if (state != json_tokener_state_finish && saved_state != json_tokener_state_finish)
  1224. tok->err = json_tokener_error_parse_eof;
  1225. }
  1226. #ifdef HAVE_USELOCALE
  1227. uselocale(oldlocale);
  1228. freelocale(newloc);
  1229. #elif defined(HAVE_SETLOCALE)
  1230. setlocale(LC_NUMERIC, oldlocale);
  1231. free(oldlocale);
  1232. #endif
  1233. if (tok->err == json_tokener_success)
  1234. {
  1235. json_object *ret = json_object_get(current);
  1236. int ii;
  1237. /* Partially reset, so we parse additional objects on subsequent calls. */
  1238. for (ii = tok->depth; ii >= 0; ii--)
  1239. json_tokener_reset_level(tok, ii);
  1240. return ret;
  1241. }
  1242. MC_DEBUG("json_tokener_parse_ex: error %s at offset %d\n", json_tokener_errors[tok->err],
  1243. tok->char_offset);
  1244. return NULL;
  1245. }
  1246. static json_bool json_tokener_validate_utf8(const char c, unsigned int *nBytes)
  1247. {
  1248. unsigned char chr = c;
  1249. if (*nBytes == 0)
  1250. {
  1251. if (chr >= 0x80)
  1252. {
  1253. if ((chr & 0xe0) == 0xc0)
  1254. *nBytes = 1;
  1255. else if ((chr & 0xf0) == 0xe0)
  1256. *nBytes = 2;
  1257. else if ((chr & 0xf8) == 0xf0)
  1258. *nBytes = 3;
  1259. else
  1260. return 0;
  1261. }
  1262. }
  1263. else
  1264. {
  1265. if ((chr & 0xC0) != 0x80)
  1266. return 0;
  1267. (*nBytes)--;
  1268. }
  1269. return 1;
  1270. }
  1271. void json_tokener_set_flags(struct json_tokener *tok, int flags)
  1272. {
  1273. tok->flags = flags;
  1274. }
  1275. size_t json_tokener_get_parse_end(struct json_tokener *tok)
  1276. {
  1277. assert(tok->char_offset >= 0); /* Drop this line when char_offset becomes a size_t */
  1278. return (size_t)tok->char_offset;
  1279. }
  1280. static int json_tokener_parse_double(const char *buf, int len, double *retval)
  1281. {
  1282. char *end;
  1283. *retval = strtod(buf, &end);
  1284. if (buf + len == end)
  1285. return 0; // It worked
  1286. return 1;
  1287. }