test_parse.c

#include <assert.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "json.h"
#include "json_tokener.h"
#include "json_visit.h"
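
/*
 * Exercises json_tokener in several ways: whole-string parsing
 * (test_basic_parse), BOM handling (test_utf8_parse), error reporting via
 * json_tokener_parse_verbose (test_verbose_parse), and chunked/incremental
 * parsing (test_incremental_parse).
 *
 * Environment variables read by the test:
 *   TEST_PARSE_CHUNKSIZE - if set, every basic-parse string is re-parsed
 *                          incrementally in chunks of this many bytes.
 *   TEST_PARSE_DEBUG     - if set, each chunk is printed as it is parsed.
 * (The exact invocation, e.g. "TEST_PARSE_CHUNKSIZE=1 ./test_parse", depends
 * on the surrounding test harness.)
 */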
static void test_basic_parse(void);
static void test_utf8_parse(void);
static void test_verbose_parse(void);
static void test_incremental_parse(void);

int main(void)
{
	static const char separator[] = "==================================";

	MC_SET_DEBUG(1);

	test_basic_parse();
	puts(separator);
	test_utf8_parse();
	puts(separator);
	test_verbose_parse();
	puts(separator);
	test_incremental_parse();
	puts(separator);
	return 0;
}
static json_c_visit_userfunc clear_serializer;
static void do_clear_serializer(json_object *jso);
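
/*
 * Re-parse test_string incrementally with json_tokener_parse_ex(), feeding it
 * TEST_PARSE_CHUNKSIZE bytes at a time (including the trailing '\0'), and
 * verify that the result serializes to the same string as a single
 * json_tokener_parse() call on the whole input.
 */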
static void single_incremental_parse(const char *test_string, int clear_serializer)
{
	int ii;
	int chunksize = atoi(getenv("TEST_PARSE_CHUNKSIZE"));
	struct json_tokener *tok;
	enum json_tokener_error jerr;
	json_object *all_at_once_obj, *new_obj = NULL;
	const char *all_at_once_str, *new_str;

	assert(chunksize > 0); /* guard against a missing or invalid TEST_PARSE_CHUNKSIZE */

	all_at_once_obj = json_tokener_parse(test_string);
	if (clear_serializer)
		do_clear_serializer(all_at_once_obj);
	all_at_once_str = json_object_to_json_string(all_at_once_obj);

	tok = json_tokener_new();
	int test_string_len = (int)strlen(test_string) + 1; // Including '\0' !
	for (ii = 0; ii < test_string_len; ii += chunksize)
	{
		int len_to_parse = chunksize;
		if (ii + chunksize > test_string_len)
			len_to_parse = test_string_len - ii;

		if (getenv("TEST_PARSE_DEBUG") != NULL)
			printf(" chunk: %.*s\n", len_to_parse, &test_string[ii]);
		new_obj = json_tokener_parse_ex(tok, &test_string[ii], len_to_parse);

		jerr = json_tokener_get_error(tok);
		if (jerr != json_tokener_continue || new_obj)
			break;
	}
	if (clear_serializer && new_obj)
		do_clear_serializer(new_obj);
	new_str = json_object_to_json_string(new_obj);

	if (strcmp(all_at_once_str, new_str) != 0)
	{
		printf("ERROR: failed to parse (%s) in %d byte chunks: %s != %s\n", test_string,
		       chunksize, all_at_once_str, new_str);
	}
	json_tokener_free(tok);
	if (all_at_once_obj)
		json_object_put(all_at_once_obj);
	if (new_obj)
		json_object_put(new_obj);
}
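
/*
 * Parse test_string in one call, print the re-serialized result, and, when
 * TEST_PARSE_CHUNKSIZE is set, repeat the parse incrementally.  When
 * clear_serializer is non-zero the saved serialization data is dropped first
 * so the output shows the value as actually parsed.
 */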
static void single_basic_parse(const char *test_string, int clear_serializer)
{
	json_object *new_obj;

	new_obj = json_tokener_parse(test_string);
	if (clear_serializer)
		do_clear_serializer(new_obj);
	printf("new_obj.to_string(%s)=%s\n", test_string, json_object_to_json_string(new_obj));
	json_object_put(new_obj);

	if (getenv("TEST_PARSE_CHUNKSIZE") != NULL)
		single_incremental_parse(test_string, clear_serializer);
}
static void test_basic_parse(void)
{
	single_basic_parse("\"\003\"", 0);
	single_basic_parse("/* hello */\"foo\"", 0);
	single_basic_parse("// hello\n\"foo\"", 0);
	single_basic_parse("\"foo\"blue", 0);
	single_basic_parse("\'foo\'", 0);
	single_basic_parse("\"\\u0041\\u0042\\u0043\"", 0);
	single_basic_parse("\"\\u4e16\\u754c\\u00df\"", 0);
	single_basic_parse("\"\\u4E16\"", 0);
	single_basic_parse("\"\\u4e1\"", 0);
	single_basic_parse("\"\\u4e1@\"", 0);
	single_basic_parse("\"\\ud840\\u4e16\"", 0);
	single_basic_parse("\"\\ud840\"", 0);
	single_basic_parse("\"\\udd27\"", 0);
	// Test with a "short" high surrogate
	single_basic_parse("[9,'\\uDAD", 0);
	single_basic_parse("null", 0);
	single_basic_parse("NaN", 0);
	single_basic_parse("-NaN", 0); /* non-sensical, returns null */

	single_basic_parse("Inf", 0); /* must use full string, returns null */
	single_basic_parse("inf", 0); /* must use full string, returns null */
	single_basic_parse("Infinity", 0);
	single_basic_parse("infinity", 0);
	single_basic_parse("-Infinity", 0);
	single_basic_parse("-infinity", 0);
	single_basic_parse("{ \"min\": Infinity, \"max\": -Infinity}", 0);

	single_basic_parse("Infinity!", 0);
	single_basic_parse("Infinitynull", 0);
	single_basic_parse("InfinityXXXX", 0);
	single_basic_parse("-Infinitynull", 0);
	single_basic_parse("-InfinityXXXX", 0);
	single_basic_parse("Infinoodle", 0);
	single_basic_parse("InfinAAA", 0);
	single_basic_parse("-Infinoodle", 0);
	single_basic_parse("-InfinAAA", 0);

	single_basic_parse("True", 0);
	single_basic_parse("False", 0);

	/* not case sensitive */
	single_basic_parse("tRue", 0);
	single_basic_parse("fAlse", 0);
	single_basic_parse("nAn", 0);
	single_basic_parse("iNfinity", 0);

	single_basic_parse("12", 0);
	single_basic_parse("12.3", 0);
	/* When using json_tokener_parse() there is no way to know whether
	 * there is more data after the parsed object, yet in some cases an
	 * object is successfully returned anyway.
	 */
	single_basic_parse("12.3.4", 0);
	single_basic_parse("2015-01-15", 0);
	single_basic_parse("12.3xxx", 0);
	single_basic_parse("12.3{\"a\":123}", 0);
	single_basic_parse("12.3\n", 0);
	single_basic_parse("12.3 ", 0);

	single_basic_parse("{\"FoO\" : -12.3E512}", 0);
	single_basic_parse("{\"FoO\" : -12.3e512}", 0);
	single_basic_parse("{\"FoO\" : -12.3E51.2}", 0);   /* non-sensical, returns null */
	single_basic_parse("{\"FoO\" : -12.3E512E12}", 0); /* non-sensical, returns null */

	single_basic_parse("[\"\\n\"]", 0);
	single_basic_parse("[\"\\nabc\\n\"]", 0);
	single_basic_parse("[null]", 0);
	single_basic_parse("[]", 0);
	single_basic_parse("[false]", 0);
	single_basic_parse("[\"abc\",null,\"def\",12]", 0);

	single_basic_parse("{}", 0);
	single_basic_parse("{ \"foo\": \"bar\" }", 0);
	single_basic_parse("{ \'foo\': \'bar\' }", 0);
	single_basic_parse("{ \"foo\": \"bar\", \"baz\": null, \"bool0\": true }", 0);
	single_basic_parse("{ \"foo\": [null, \"foo\"] }", 0);
	single_basic_parse("{ \"abc\": 12, \"foo\": \"bar\", \"bool0\": false, \"bool1\": true, "
	                   "\"arr\": [ 1, 2, 3, null, 5 ] }",
	                   0);
	single_basic_parse("{ \"abc\": \"blue\nred\\ngreen\" }", 0);

	// Clear serializer for these tests so we see the actual parsed value.
	single_basic_parse("null", 1);
	single_basic_parse("false", 1);
	single_basic_parse("[0e]", 1);
	single_basic_parse("[0e+]", 1);
	single_basic_parse("[0e+-1]", 1);
	single_basic_parse("\"hello world!\"", 1);

	// uint64/int64 range test
	single_basic_parse("[9223372036854775806]", 1);
	single_basic_parse("[9223372036854775807]", 1);
	single_basic_parse("[9223372036854775808]", 1);
	single_basic_parse("[-9223372036854775807]", 1);
	single_basic_parse("[-9223372036854775808]", 1);
	single_basic_parse("[-9223372036854775809]", 1);

	single_basic_parse("[18446744073709551614]", 1);
	single_basic_parse("[18446744073709551615]", 1);
	single_basic_parse("[18446744073709551616]", 1);
}
static void test_utf8_parse(void)
{
	// json_tokener_parse doesn't support checking for byte order marks.
	// It's the responsibility of the caller to detect and skip a BOM.
	// Both of these checks return null.
	char *utf8_bom = "\xEF\xBB\xBF";
	char *utf8_bom_and_chars = "\xEF\xBB\xBF{}";
	single_basic_parse(utf8_bom, 0);
	single_basic_parse(utf8_bom_and_chars, 0);
}

// Clear the re-serialization information that the tokener
// saves to ensure that the output reflects the actual
// values we parsed, rather than just the original input.
static void do_clear_serializer(json_object *jso)
{
	json_c_visit(jso, 0, clear_serializer, NULL);
}

static int clear_serializer(json_object *jso, int flags, json_object *parent_jso,
                            const char *jso_key, size_t *jso_index, void *userarg)
{
	if (jso)
		json_object_set_serializer(jso, NULL, NULL, NULL);
	return JSON_C_VISIT_RETURN_CONTINUE;
}
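
/*
 * Verify that json_tokener_parse_verbose() reports the specific
 * json_tokener_error for malformed input, while plain json_tokener_parse()
 * just returns NULL.
 */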
static void test_verbose_parse(void)
{
	json_object *new_obj;
	enum json_tokener_error error = json_tokener_success;

	new_obj = json_tokener_parse_verbose("{ foo }", &error);
	assert(error == json_tokener_error_parse_object_key_name);
	assert(new_obj == NULL);

	new_obj = json_tokener_parse("{ foo }");
	assert(new_obj == NULL);

	new_obj = json_tokener_parse("foo");
	assert(new_obj == NULL);
	new_obj = json_tokener_parse_verbose("foo", &error);
	assert(new_obj == NULL);
	/* Because the string starts with 'f', parsing reports a boolean error. */
	assert(error == json_tokener_error_parse_boolean);

	puts("json_tokener_parse_verbose() OK");
}
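
/*
 * Table-driven incremental-parse tests.  Each entry is fed to
 * json_tokener_parse_ex() and the resulting error code and parse-end offset
 * are compared against expected_error and char_offset.  A length of -1 means
 * "use strlen(string_to_parse)", and a char_offset of -1 means "expect the
 * parse to consume the whole (adjusted) length".
 */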
struct incremental_step
{
	const char *string_to_parse;
	int length;
	int char_offset;
	enum json_tokener_error expected_error;
	int reset_tokener; /* Set to 1 to call json_tokener_reset() after parsing */
	int tok_flags;     /* JSON_TOKENER_* flags to pass to json_tokener_set_flags() */
} incremental_steps[] = {
	/* Check that full json messages can be parsed, both w/ and w/o a reset */
	{"{ \"foo\": 123 }", -1, -1, json_tokener_success, 0},
	{"{ \"foo\": 456 }", -1, -1, json_tokener_success, 1},
	{"{ \"foo\": 789 }", -1, -1, json_tokener_success, 1},
	/* Check comment parsing */
	{"/* hello */{ \"foo\"", -1, -1, json_tokener_continue, 0},
	{"/* hello */:/* hello */", -1, -1, json_tokener_continue, 0},
	{"\"bar\"/* hello */", -1, -1, json_tokener_continue, 0},
	{"}/* hello */", -1, -1, json_tokener_success, 1},
	{"/ hello ", -1, 1, json_tokener_error_parse_comment, 1},
	{"/* hello\"foo\"", -1, -1, json_tokener_continue, 1},
	{"/* hello*\"foo\"", -1, -1, json_tokener_continue, 1},
	{"// hello\"foo\"", -1, -1, json_tokener_continue, 1},

	/* Check a basic incremental parse */
	{"{ \"foo", -1, -1, json_tokener_continue, 0},
	{"\": {\"bar", -1, -1, json_tokener_continue, 0},
	{"\":13}}", -1, -1, json_tokener_success, 1},
	/* Check the UTF-16 surrogate pair handling in various ways.
	 * Note: \ud834\udd1e is U+1D11E, Musical Symbol G Clef.
	 * Your terminal may not display these correctly; in particular
	 * PuTTY doesn't currently show this character.
	 */

	/* parse one char at a time */
	{"\"\\", -1, -1, json_tokener_continue, 0},
	{"u", -1, -1, json_tokener_continue, 0},
	{"d", -1, -1, json_tokener_continue, 0},
	{"8", -1, -1, json_tokener_continue, 0},
	{"3", -1, -1, json_tokener_continue, 0},
	{"4", -1, -1, json_tokener_continue, 0},
	{"\\", -1, -1, json_tokener_continue, 0},
	{"u", -1, -1, json_tokener_continue, 0},
	{"d", -1, -1, json_tokener_continue, 0},
	{"d", -1, -1, json_tokener_continue, 0},
	{"1", -1, -1, json_tokener_continue, 0},
	{"e\"", -1, -1, json_tokener_success, 1},

	/* parse two chars at a time */
	{"\"\\u", -1, -1, json_tokener_continue, 0},
	{"d8", -1, -1, json_tokener_continue, 0},
	{"34", -1, -1, json_tokener_continue, 0},
	{"\\u", -1, -1, json_tokener_continue, 0},
	{"dd", -1, -1, json_tokener_continue, 0},
	{"1e\"", -1, -1, json_tokener_success, 1},
	/* check the low surrogate pair */
	{"\"\\ud834", -1, -1, json_tokener_continue, 0},
	{"\\udd1e\"", -1, -1, json_tokener_success, 1},
	{"\"\\ud834\\", -1, -1, json_tokener_continue, 0},
	{"udd1e\"", -1, -1, json_tokener_success, 1},
	{"\"\\ud834\\u", -1, -1, json_tokener_continue, 0},
	{"dd1e\"", -1, -1, json_tokener_success, 1},
	{"\"fff \\ud834\\ud", -1, -1, json_tokener_continue, 0},
	{"d1e bar\"", -1, -1, json_tokener_success, 1},
	{"\"fff \\ud834\\udd", -1, -1, json_tokener_continue, 0},
	{"1e bar\"", -1, -1, json_tokener_success, 1},

	/* \ud83d\ude00 is U+1F600, Grinning Face
	 * Displays fine in PuTTY, though you may need "less -r"
	 */
	{"\"fff \\ud83d\\ude", -1, -1, json_tokener_continue, 0},
	{"00 bar\"", -1, -1, json_tokener_success, 1},

	/* Check that json_tokener_reset actually resets */
	{"{ \"foo", -1, -1, json_tokener_continue, 1},
	{": \"bar\"}", -1, 0, json_tokener_error_parse_unexpected, 1},

	/* Check incremental parsing with trailing characters */
	{"{ \"foo", -1, -1, json_tokener_continue, 0},
	{"\": {\"bar", -1, -1, json_tokener_continue, 0},
	{"\":13}}XXXX", 10, 6, json_tokener_success, 0},
	{"XXXX", 4, 0, json_tokener_error_parse_unexpected, 1},

	/* Check that trailing characters can change w/o a reset */
	{"{\"x\": 123 }\"X\"", -1, 11, json_tokener_success, 0},
	{"\"Y\"", -1, -1, json_tokener_success, 1},

	/* Trailing characters should cause a failure in strict mode */
	{"{\"foo\":9}{\"bar\":8}", -1, 9, json_tokener_error_parse_unexpected, 1, JSON_TOKENER_STRICT},
	/* ... unless explicitly allowed. */
	{"{\"foo\":9}{\"bar\":8}", -1, 9, json_tokener_success, 0,
	 JSON_TOKENER_STRICT | JSON_TOKENER_ALLOW_TRAILING_CHARS},
	{"{\"b\":8}ignored garbage", -1, 7, json_tokener_success, 1,
	 JSON_TOKENER_STRICT | JSON_TOKENER_ALLOW_TRAILING_CHARS},
	/* To stop parsing a number we need to reach a non-digit, e.g. a \0 */
	{"1", 1, 1, json_tokener_continue, 0},
	/* This should parse as the number 12, since it continues the "1" */
	{"2", 2, 1, json_tokener_success, 0},
	{"12{", 3, 2, json_tokener_success, 1},
	/* Parse number in strict mode */
	{"[02]", -1, 3, json_tokener_error_parse_number, 1, JSON_TOKENER_STRICT},

	{"0e+0", 5, 4, json_tokener_success, 1},
	{"[0e+0]", -1, -1, json_tokener_success, 1},
	/* The behavior when missing the exponent varies slightly */
	{"0e", 2, 2, json_tokener_continue, 1},
	{"0e", 3, 2, json_tokener_success, 1},
	{"0e", 3, 2, json_tokener_error_parse_eof, 1, JSON_TOKENER_STRICT},
	{"[0e]", -1, -1, json_tokener_success, 1},
	{"[0e]", -1, 3, json_tokener_error_parse_number, 1, JSON_TOKENER_STRICT},

	{"0e+", 3, 3, json_tokener_continue, 1},
	{"0e+", 4, 3, json_tokener_success, 1},
	{"0e+", 4, 3, json_tokener_error_parse_eof, 1, JSON_TOKENER_STRICT},
	{"[0e+]", -1, -1, json_tokener_success, 1},
	{"[0e+]", -1, 4, json_tokener_error_parse_number, 1, JSON_TOKENER_STRICT},

	{"0e-", 3, 3, json_tokener_continue, 1},
	{"0e-", 4, 3, json_tokener_success, 1},
	{"0e-", 4, 3, json_tokener_error_parse_eof, 1, JSON_TOKENER_STRICT},
	{"[0e-]", -1, -1, json_tokener_success, 1},
	{"[0e-]", -1, 4, json_tokener_error_parse_number, 1, JSON_TOKENER_STRICT},

	/* You might expect this to fail, but it won't because
	 * it's a valid partial parse; note the char_offset: */
	{"0e+-", 5, 3, json_tokener_success, 1},
	{"0e+-", 5, 3, json_tokener_error_parse_number, 1, JSON_TOKENER_STRICT},
	{"[0e+-]", -1, 4, json_tokener_error_parse_number, 1},

	/* Similar tests for other kinds of objects: */
	/* These could all return success immediately, since regardless of
	 * what follows the false/true/null token we *will* return a json object,
	 * but it currently doesn't work that way.  hmm... */
  332. {"false", 5, 5, json_tokener_continue, 1},
  333. {"false", 6, 5, json_tokener_success, 1},
  334. {"true", 4, 4, json_tokener_continue, 1},
  335. {"true", 5, 4, json_tokener_success, 1},
  336. {"null", 4, 4, json_tokener_continue, 1},
  337. {"null", 5, 4, json_tokener_success, 1},
  338. {"Infinity", 9, 8, json_tokener_success, 1},
  339. {"infinity", 9, 8, json_tokener_success, 1},
  340. {"-infinity", 10, 9, json_tokener_success, 1},
  341. {"infinity", 9, 0, json_tokener_error_parse_unexpected, 1, JSON_TOKENER_STRICT},
  342. {"-infinity", 10, 1, json_tokener_error_parse_unexpected, 1, JSON_TOKENER_STRICT},
  343. {"inf", 3, 3, json_tokener_continue, 0},
  344. {"inity", 6, 5, json_tokener_success, 1},
  345. {"-inf", 4, 4, json_tokener_continue, 0},
  346. {"inity", 6, 5, json_tokener_success, 1},
  347. {"i", 1, 1, json_tokener_continue, 0},
  348. {"n", 1, 1, json_tokener_continue, 0},
  349. {"f", 1, 1, json_tokener_continue, 0},
  350. {"i", 1, 1, json_tokener_continue, 0},
  351. {"n", 1, 1, json_tokener_continue, 0},
  352. {"i", 1, 1, json_tokener_continue, 0},
  353. {"t", 1, 1, json_tokener_continue, 0},
  354. {"y", 1, 1, json_tokener_continue, 0},
  355. {"", 1, 0, json_tokener_success, 1},
  356. {"-", 1, 1, json_tokener_continue, 0},
  357. {"inf", 3, 3, json_tokener_continue, 0},
  358. {"ini", 3, 3, json_tokener_continue, 0},
  359. {"ty", 3, 2, json_tokener_success, 1},
  360. {"-", 1, 1, json_tokener_continue, 0},
  361. {"i", 1, 1, json_tokener_continue, 0},
  362. {"nfini", 5, 5, json_tokener_continue, 0},
  363. {"ty", 3, 2, json_tokener_success, 1},
  364. {"-i", 2, 2, json_tokener_continue, 0},
  365. {"nfinity", 8, 7, json_tokener_success, 1},
  366. {"InfinityX", 10, 8, json_tokener_success, 0},
  367. {"X", 1, 0, json_tokener_error_parse_unexpected, 1},
  368. {"Infinity1234", 13, 8, json_tokener_success, 0},
  369. {"1234", 5, 4, json_tokener_success, 1},
  370. {"Infinity9999", 8, 8, json_tokener_continue, 0},
  371. /* returns the Infinity loaded up by the previous call: */
  372. {"1234", 5, 0, json_tokener_success, 0},
  373. {"1234", 5, 4, json_tokener_success, 1},
  374. /* offset=1 because "n" is the start of "null". hmm... */
  375. {"noodle", 7, 1, json_tokener_error_parse_null, 1},
  376. /* offset=2 because "na" is the start of "nan". hmm... */
  377. {"naodle", 7, 2, json_tokener_error_parse_null, 1},
  378. /* offset=2 because "tr" is the start of "true". hmm... */
  379. {"track", 6, 2, json_tokener_error_parse_boolean, 1},
  380. {"fail", 5, 2, json_tokener_error_parse_boolean, 1},
  381. /* Although they may initially look like they should fail,
  382. * the next few tests check that parsing multiple sequential
  383. * json objects in the input works as expected
  384. */
  385. {"null123", 8, 4, json_tokener_success, 0},
  386. {&"null123"[4], 4, 3, json_tokener_success, 1},
  387. {"nullx", 6, 4, json_tokener_success, 0},
  388. {&"nullx"[4], 2, 0, json_tokener_error_parse_unexpected, 1},
  389. {"{\"a\":1}{\"b\":2}", 15, 7, json_tokener_success, 0},
  390. {&"{\"a\":1}{\"b\":2}"[7], 8, 7, json_tokener_success, 1},
	/*
	 * Though this may seem invalid at first glance, it parses as
	 * three separate numbers: 2015, -1 and -15.
	 * Of course, simply pasting together a stream of arbitrary
	 * positive numbers won't work, since there would be no way to
	 * tell where in e.g. "2015015" the next number starts, so a
	 * reliably parsable stream must not include json_type_int or
	 * json_type_double objects without some other delimiter,
	 * e.g. whitespace.
	 */
	{&"2015-01-15"[0], 11, 4, json_tokener_success, 1},
	{&"2015-01-15"[4], 7, 3, json_tokener_success, 1},
	{&"2015-01-15"[7], 4, 3, json_tokener_success, 1},
	{&"2015 01 15"[0], 11, 5, json_tokener_success, 1},
	{&"2015 01 15"[4], 7, 4, json_tokener_success, 1},
	{&"2015 01 15"[7], 4, 3, json_tokener_success, 1},
	/* Strings have a well-defined end point, so we can stop at the quote */
	{"\"blue\"", -1, -1, json_tokener_success, 0},

	/* Check each of the escape sequences defined by the spec */
	{"\"\\\"\"", -1, -1, json_tokener_success, 0},
	{"\"\\\\\"", -1, -1, json_tokener_success, 0},
	{"\"\\b\"", -1, -1, json_tokener_success, 0},
	{"\"\\f\"", -1, -1, json_tokener_success, 0},
	{"\"\\n\"", -1, -1, json_tokener_success, 0},
	{"\"\\r\"", -1, -1, json_tokener_success, 0},
	{"\"\\t\"", -1, -1, json_tokener_success, 0},
	{"\"\\/\"", -1, -1, json_tokener_success, 0},
	// Escaping a forward slash is optional
	{"\"/\"", -1, -1, json_tokener_success, 0},
	/* Check invalid escape sequences */
	{"\"\\a\"", -1, 2, json_tokener_error_parse_string, 1},
	/* Check '\'' in strict mode */
	{"\'foo\'", -1, 0, json_tokener_error_parse_unexpected, 1, JSON_TOKENER_STRICT},

	/* Parse array/object */
	{"[1,2,3]", -1, -1, json_tokener_success, 0},
	{"[1,2,3}", -1, 6, json_tokener_error_parse_array, 1},
	{"{\"a\"}", -1, 4, json_tokener_error_parse_object_key_sep, 1},
	{"{\"a\":1]", -1, 6, json_tokener_error_parse_object_value_sep, 1},
	{"{\"a\"::1}", -1, 5, json_tokener_error_parse_unexpected, 1},
	{"{\"a\":}", -1, 5, json_tokener_error_parse_unexpected, 1},
	{"{\"a\":1,\"a\":2}", -1, -1, json_tokener_success, 1},
	{"\"a\":1}", -1, 3, json_tokener_success, 1},
	{"{\"a\":1", -1, -1, json_tokener_continue, 1},
	{"[,]", -1, 1, json_tokener_error_parse_unexpected, 1},
	{"[,1]", -1, 1, json_tokener_error_parse_unexpected, 1},

	/* This behaviour doesn't entirely follow the json spec, but until we have
	 * a way to specify how strict to be we follow Postel's Law and be liberal
	 * in what we accept (up to a point).
	 */
	{"[1,2,3,]", -1, -1, json_tokener_success, 0},
	{"[1,2,,3,]", -1, 5, json_tokener_error_parse_unexpected, 0},

	{"[1,2,3,]", -1, 7, json_tokener_error_parse_unexpected, 1, JSON_TOKENER_STRICT},
	{"{\"a\":1,}", -1, 7, json_tokener_error_parse_unexpected, 1, JSON_TOKENER_STRICT},
	// utf-8 test
	// ascii encoding
	{"\x22\x31\x32\x33\x61\x73\x63\x24\x25\x26\x22", -1, -1, json_tokener_success, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	{"\x22\x31\x32\x33\x61\x73\x63\x24\x25\x26\x22", -1, -1, json_tokener_success, 1},
	// utf-8 encoding
	{"\x22\xe4\xb8\x96\xe7\x95\x8c\x22", -1, -1, json_tokener_success, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	{"\x22\xe4\xb8", -1, 3, json_tokener_error_parse_utf8_string, 0, JSON_TOKENER_VALIDATE_UTF8},
	{"\x96\xe7\x95\x8c\x22", -1, 0, json_tokener_error_parse_utf8_string, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	{"\x22\xe4\xb8\x96\xe7\x95\x8c\x22", -1, -1, json_tokener_success, 1},
	{"\x22\xcf\x80\xcf\x86\x22", -1, -1, json_tokener_success, 1, JSON_TOKENER_VALIDATE_UTF8},
	{"\x22\xf0\xa5\x91\x95\x22", -1, -1, json_tokener_success, 1, JSON_TOKENER_VALIDATE_UTF8},
	// invalid utf-8 encoding
	{"\x22\xe6\x9d\x4e\x22", -1, 3, json_tokener_error_parse_utf8_string, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	{"\x22\xe6\x9d\x4e\x22", -1, 5, json_tokener_success, 1},
	// GBK encoding
	{"\x22\xc0\xee\xc5\xf4\x22", -1, 2, json_tokener_error_parse_utf8_string, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	{"\x22\xc0\xee\xc5\xf4\x22", -1, 6, json_tokener_success, 1},
	// char after space
	{"\x20\x20\x22\xe4\xb8\x96\x22", -1, -1, json_tokener_success, 1, JSON_TOKENER_VALIDATE_UTF8},
	{"\x20\x20\x81\x22\xe4\xb8\x96\x22", -1, 2, json_tokener_error_parse_utf8_string, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	{"\x5b\x20\x81\x31\x5d", -1, 2, json_tokener_error_parse_utf8_string, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	// char in state inf
	{"\x49\x6e\x66\x69\x6e\x69\x74\x79", 9, 8, json_tokener_success, 1},
	{"\x49\x6e\x66\x81\x6e\x69\x74\x79", -1, 3, json_tokener_error_parse_utf8_string, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	// char in escape unicode
	{"\x22\x5c\x75\x64\x38\x35\x35\x5c\x75\x64\x63\x35\x35\x22", 15, 14, json_tokener_success, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	{"\x22\x5c\x75\x64\x38\x35\x35\xc0\x75\x64\x63\x35\x35\x22", -1, 8,
	 json_tokener_error_parse_utf8_string, 1, JSON_TOKENER_VALIDATE_UTF8},
	{"\x22\x5c\x75\x64\x30\x30\x33\x31\xc0\x22", -1, 9, json_tokener_error_parse_utf8_string, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	// char in number
	{"\x31\x31\x81\x31\x31", -1, 2, json_tokener_error_parse_utf8_string, 1,
	 JSON_TOKENER_VALIDATE_UTF8},
	// char in object
	{"\x7b\x22\x31\x81\x22\x3a\x31\x7d", -1, 3, json_tokener_error_parse_utf8_string, 1,
	 JSON_TOKENER_VALIDATE_UTF8},

	{NULL, -1, -1, json_tokener_success, 0},
};
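
/*
 * Walk the incremental_steps table with a single tokener: apply tok_flags,
 * parse each string (length -1 means strlen), and check both the resulting
 * error code and json_tokener_get_parse_end() against the expected values.
 * The tokener is only reset between entries when reset_tokener has its low
 * bit set, so consecutive entries can deliberately continue one another.
 */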
static void test_incremental_parse(void)
{
	json_object *new_obj;
	enum json_tokener_error jerr;
	struct json_tokener *tok;
	const char *string_to_parse;
	int ii;
	int num_ok, num_error;

	num_ok = 0;
	num_error = 0;

	printf("Starting incremental tests.\n");
	printf("Note: quotes and backslashes seen in the output here are literal values passed\n");
	printf(" to the parse functions. e.g. this is 4 characters: \"\\f\"\n");

	string_to_parse = "{ \"foo"; /* } */
	printf("json_tokener_parse(%s) ... ", string_to_parse);
	new_obj = json_tokener_parse(string_to_parse);
	if (new_obj == NULL)
		puts("got error as expected");

	/* test incremental parsing in various forms */
	tok = json_tokener_new();
	for (ii = 0; incremental_steps[ii].string_to_parse != NULL; ii++)
	{
		int this_step_ok = 0;
		struct incremental_step *step = &incremental_steps[ii];
		int length = step->length;
		size_t expected_char_offset;

		json_tokener_set_flags(tok, step->tok_flags);

		if (length == -1)
			length = (int)strlen(step->string_to_parse);
		if (step->char_offset == -1)
			expected_char_offset = length;
		else
			expected_char_offset = step->char_offset;

		printf("json_tokener_parse_ex(tok, %-12s, %3d) ... ", step->string_to_parse,
		       length);
		new_obj = json_tokener_parse_ex(tok, step->string_to_parse, length);

		jerr = json_tokener_get_error(tok);
		if (step->expected_error != json_tokener_success)
		{
			if (new_obj != NULL)
				printf("ERROR: invalid object returned: %s\n",
				       json_object_to_json_string(new_obj));
			else if (jerr != step->expected_error)
				printf("ERROR: got wrong error: %s\n",
				       json_tokener_error_desc(jerr));
			else if (json_tokener_get_parse_end(tok) != expected_char_offset)
				printf("ERROR: wrong char_offset %zu != expected %zu\n",
				       json_tokener_get_parse_end(tok), expected_char_offset);
			else
			{
				printf("OK: got correct error: %s\n",
				       json_tokener_error_desc(jerr));
				this_step_ok = 1;
			}
		}
		else
		{
			if (new_obj == NULL &&
			    !(step->length >= 4 && strncmp(step->string_to_parse, "null", 4) == 0))
				printf("ERROR: expected valid object, instead: %s\n",
				       json_tokener_error_desc(jerr));
			else if (json_tokener_get_parse_end(tok) != expected_char_offset)
				printf("ERROR: wrong char_offset %zu != expected %zu\n",
				       json_tokener_get_parse_end(tok), expected_char_offset);
			else
			{
				printf("OK: got object of type [%s]: %s\n",
				       json_type_to_name(json_object_get_type(new_obj)),
				       json_object_to_json_string(new_obj));
				this_step_ok = 1;
			}
		}

		if (new_obj)
			json_object_put(new_obj);

		if (step->reset_tokener & 1)
			json_tokener_reset(tok);

		if (this_step_ok)
			num_ok++;
		else
			num_error++;
	}

	json_tokener_free(tok);

	printf("End Incremental Tests OK=%d ERROR=%d\n", num_ok, num_error);
}