--- json-lexer.c  (2a96042a8da60b625cc9dbbdab3b03cd7586e34f)
+++ json-lexer.c  (852dfa76b85c5d23541377809aa4bcfb4fc037db)

 /*
  * JSON lexer
  *
  * Copyright IBM, Corp. 2009
  *
  * Authors:
  *  Anthony Liguori   <aliguori@us.ibm.com>
  *

[... 247 unchanged lines hidden ...]

     lexer->start_state = lexer->state = enable_interpolation
         ? IN_START_INTERP : IN_START;
     lexer->token = g_string_sized_new(3);
     lexer->x = lexer->y = 0;
 }

 static void json_lexer_feed_char(JSONLexer *lexer, char ch, bool flush)
 {
-    int char_consumed, new_state;
+    int new_state;
+    bool char_consumed = false;

     lexer->x++;
     if (ch == '\n') {
         lexer->x = 0;
         lexer->y++;
     }

-    do {
+    while (flush ? lexer->state != lexer->start_state : !char_consumed) {
         assert(lexer->state <= ARRAY_SIZE(json_lexer));
         new_state = json_lexer[lexer->state][(uint8_t)ch];
-        char_consumed = !TERMINAL_NEEDED_LOOKAHEAD(lexer->state, new_state);
-        if (char_consumed && !flush) {
+        char_consumed = !flush
+            && !TERMINAL_NEEDED_LOOKAHEAD(lexer->state, new_state);
+        if (char_consumed) {
             g_string_append_c(lexer->token, ch);
         }

         switch (new_state) {
         case JSON_LCURLY:
         case JSON_RCURLY:
         case JSON_LSQUARE:
         case JSON_RSQUARE:

[... 28 unchanged lines hidden ...]

                                        lexer->x, lexer->y);
             g_string_truncate(lexer->token, 0);
             lexer->state = lexer->start_state;
             return;
         default:
             break;
         }
         lexer->state = new_state;
-    } while (!char_consumed && !flush);
+    }

     /* Do not let a single token grow to an arbitrarily large size,
      * this is a security consideration.
      */
     if (lexer->token->len > MAX_TOKEN_SIZE) {
         json_message_process_token(lexer, lexer->token, lexer->state,
                                    lexer->x, lexer->y);
         g_string_truncate(lexer->token, 0);

[... 7 unchanged lines hidden ...]

     for (i = 0; i < size; i++) {
         json_lexer_feed_char(lexer, buffer[i], false);
     }
 }

 void json_lexer_flush(JSONLexer *lexer)
 {
-    if (lexer->state != lexer->start_state) {
-        json_lexer_feed_char(lexer, 0, true);
-    }
+    json_lexer_feed_char(lexer, 0, true);
+    assert(lexer->state == lexer->start_state);
     json_message_process_token(lexer, lexer->token, JSON_END_OF_INPUT,
                                lexer->x, lexer->y);
 }

 void json_lexer_destroy(JSONLexer *lexer)
 {
     g_string_free(lexer->token, true);
 }
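For readers skimming the patch, here is a minimal caller sketch (not part of the change) showing where json_lexer_flush() sits in the lexer's lifecycle. It assumes QEMU's build environment and the entry points visible in this file: json_lexer_init() with its enable_interpolation flag, json_lexer_feed(), json_lexer_flush(), and json_lexer_destroy(). The include path and the wrapper function lex_one_buffer() are illustrative assumptions, not anything defined by this patch; completed tokens are handed to json_message_process_token(), which belongs to the JSON streamer rather than to this caller.

/* Hypothetical driver, not part of this patch.  The include path is an
 * assumption and may differ between QEMU versions. */
#include "qemu/osdep.h"
#include "qapi/qmp/json-lexer.h"

static void lex_one_buffer(const char *buf, size_t len)
{
    JSONLexer lexer;

    json_lexer_init(&lexer, false);      /* false: no '%' interpolation */

    /* Feeds the buffer a byte at a time; each completed token is pushed
     * to json_message_process_token() as it is recognized. */
    json_lexer_feed(&lexer, buf, len);

    /* End of input.  With the new code this always feeds a NUL with
     * flush=true, and json_lexer_feed_char() keeps stepping the state
     * machine until the lexer is back in its start state, so a partially
     * buffered token is pushed out (possibly as JSON_ERROR) before the
     * JSON_END_OF_INPUT pseudo-token is reported. */
    json_lexer_flush(&lexer);

    json_lexer_destroy(&lexer);
}

The behavioral difference is concentrated in json_lexer_flush(): the old code skipped the NUL feed when the lexer was already in its start state and otherwise made exactly one transition (with flush true, the do-while body ran once), whereas the new loop condition lets a single flush call drain whatever state the lexer is in, which is what the added assertion checks.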