implemented a non-recursive parser

parent 27cf05af
...@@ -431,6 +431,18 @@ class parser ...@@ -431,6 +431,18 @@ class parser
bool sax_parse_internal(json_sax_t* sax) bool sax_parse_internal(json_sax_t* sax)
{ {
// two values for the structured values
enum class parse_state_t { array_value, object_value };
// stack to remember the hierarchy of structured values we are parsing
std::vector<parse_state_t> states;
// value to avoid a goto (see comment where set to true)
bool skip_to_state_evaluation = false;
while (true)
{
if (not skip_to_state_evaluation)
{
// invariant: get_token() was called before each iteration
switch (last_token) switch (last_token)
{ {
case token_type::begin_object: case token_type::begin_object:
...@@ -444,14 +456,15 @@ class parser ...@@ -444,14 +456,15 @@ class parser
get_token(); get_token();
// closing } -> we are done // closing } -> we are done
if (JSON_UNLIKELY(last_token == token_type::end_object)) if (last_token == token_type::end_object)
{
if (not sax->end_object())
{ {
return sax->end_object(); return false;
}
break;
} }
// parse values
while (true)
{
// parse key // parse key
if (JSON_UNLIKELY(last_token != token_type::value_string)) if (JSON_UNLIKELY(last_token != token_type::value_string))
{ {
...@@ -476,36 +489,14 @@ class parser ...@@ -476,36 +489,14 @@ class parser
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator))); parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator)));
} }
// parse value // remember we are now inside an object
get_token(); states.push_back(parse_state_t::object_value);
if (not sax_parse_internal(sax))
{
return false;
}
// comma -> next value // parse values
get_token();
if (last_token == token_type::value_separator)
{
get_token(); get_token();
continue; continue;
} }
// closing }
if (JSON_LIKELY(last_token == token_type::end_object))
{
return sax->end_object();
}
else
{
return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object)));
}
}
}
case token_type::begin_array: case token_type::begin_array:
{ {
if (not sax->start_array()) if (not sax->start_array())
...@@ -519,38 +510,18 @@ class parser ...@@ -519,38 +510,18 @@ class parser
// closing ] -> we are done // closing ] -> we are done
if (last_token == token_type::end_array) if (last_token == token_type::end_array)
{ {
return sax->end_array(); if (not sax->end_array())
}
// parse values
while (true)
{
// parse value
if (not sax_parse_internal(sax))
{ {
return false; return false;
} }
break;
// comma -> next value
get_token();
if (last_token == token_type::value_separator)
{
get_token();
continue;
} }
// closing ] // remember we are now inside an array
if (JSON_LIKELY(last_token == token_type::end_array)) states.push_back(parse_state_t::array_value);
{
return sax->end_array(); // parse values (no need to call get_token)
} continue;
else
{
return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array)));
}
}
} }
case token_type::value_float: case token_type::value_float:
...@@ -565,38 +536,66 @@ class parser ...@@ -565,38 +536,66 @@ class parser
} }
else else
{ {
return sax->number_float(res, m_lexer.move_string()); if (not sax->number_float(res, m_lexer.move_string()))
{
return false;
}
break;
} }
} }
case token_type::literal_false: case token_type::literal_false:
{ {
return sax->boolean(false); if (not sax->boolean(false))
{
return false;
}
break;
} }
case token_type::literal_null: case token_type::literal_null:
{ {
return sax->null(); if (not sax->null())
{
return false;
}
break;
} }
case token_type::literal_true: case token_type::literal_true:
{ {
return sax->boolean(true); if (not sax->boolean(true))
{
return false;
}
break;
} }
case token_type::value_integer: case token_type::value_integer:
{ {
return sax->number_integer(m_lexer.get_number_integer()); if (not sax->number_integer(m_lexer.get_number_integer()))
{
return false;
}
break;
} }
case token_type::value_string: case token_type::value_string:
{ {
return sax->string(m_lexer.move_string()); if (not sax->string(m_lexer.move_string()))
{
return false;
}
break;
} }
case token_type::value_unsigned: case token_type::value_unsigned:
{ {
return sax->number_unsigned(m_lexer.get_number_unsigned()); if (not sax->number_unsigned(m_lexer.get_number_unsigned()))
{
return false;
}
break;
} }
case token_type::parse_error: case token_type::parse_error:
...@@ -615,6 +614,123 @@ class parser ...@@ -615,6 +614,123 @@ class parser
} }
} }
} }
else
{
skip_to_state_evaluation = false;
}
// we reached this line after we successfully parsed a value
if (states.empty())
{
// empty stack: we reached the end of the hierarchy: done
return true;
}
else
{
get_token();
switch (states.back())
{
case parse_state_t::array_value:
{
// comma -> next value
if (last_token == token_type::value_separator)
{
// parse a new value
get_token();
continue;
}
// closing ]
if (JSON_LIKELY(last_token == token_type::end_array))
{
if (not sax->end_array())
{
return false;
}
// We are done with this array. Before we can parse
// a new value, we need to evaluate the new state
// first. By setting skip_to_state_evaluation to
// true, we skip the parsing switch on the next
// iteration and go straight to the state evaluation.
assert(not states.empty());
states.pop_back();
skip_to_state_evaluation = true;
continue;
}
else
{
return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array)));
}
}
case parse_state_t::object_value:
{
// comma -> next value
if (last_token == token_type::value_separator)
{
get_token();
// parse key
if (JSON_UNLIKELY(last_token != token_type::value_string))
{
return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::value_string)));
}
else
{
if (not sax->key(m_lexer.move_string()))
{
return false;
}
}
// parse separator (:)
get_token();
if (JSON_UNLIKELY(last_token != token_type::name_separator))
{
return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator)));
}
// parse values
get_token();
continue;
}
// closing }
if (JSON_LIKELY(last_token == token_type::end_object))
{
if (not sax->end_object())
{
return false;
}
// We are done with this object. Before we can
// parse a new value, we need to evaluate the new
// state first. By setting skip_to_state_evaluation
// to true, we skip the parsing switch on the next
// iteration and go straight to the state evaluation.
assert(not states.empty());
states.pop_back();
skip_to_state_evaluation = true;
continue;
}
else
{
return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object)));
}
}
}
}
}
}
/// get next token from lexer /// get next token from lexer
token_type get_token() token_type get_token()
......
...@@ -3941,6 +3941,18 @@ class parser ...@@ -3941,6 +3941,18 @@ class parser
bool sax_parse_internal(json_sax_t* sax) bool sax_parse_internal(json_sax_t* sax)
{ {
// two values for the structured values
enum class parse_state_t { array_value, object_value };
// stack to remember the hierarchy of structured values we are parsing
std::vector<parse_state_t> states;
// value to avoid a goto (see comment where set to true)
bool skip_to_tail = false;
while (true)
{
if (not skip_to_tail)
{
// invariant: get_token() was called before each iteration
switch (last_token) switch (last_token)
{ {
case token_type::begin_object: case token_type::begin_object:
...@@ -3954,14 +3966,15 @@ class parser ...@@ -3954,14 +3966,15 @@ class parser
get_token(); get_token();
// closing } -> we are done // closing } -> we are done
if (JSON_UNLIKELY(last_token == token_type::end_object)) if (last_token == token_type::end_object)
{ {
return sax->end_object(); if (not sax->end_object())
{
return false;
}
break;
} }
// parse values
while (true)
{
// parse key // parse key
if (JSON_UNLIKELY(last_token != token_type::value_string)) if (JSON_UNLIKELY(last_token != token_type::value_string))
{ {
...@@ -3986,65 +3999,149 @@ class parser ...@@ -3986,65 +3999,149 @@ class parser
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator))); parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator)));
} }
// parse value // parse values
get_token(); get_token();
if (not sax_parse_internal(sax)) states.push_back(parse_state_t::object_value);
continue;
}
case token_type::begin_array:
{
if (not sax->start_array())
{ {
return false; return false;
} }
// comma -> next value // read next token
get_token(); get_token();
if (last_token == token_type::value_separator)
// closing ] -> we are done
if (last_token == token_type::end_array)
{ {
get_token(); if (not sax->end_array())
{
return false;
}
break;
}
// parse values (no need to call get_token)
states.push_back(parse_state_t::array_value);
continue; continue;
} }
// closing } case token_type::value_float:
if (JSON_LIKELY(last_token == token_type::end_object))
{ {
return sax->end_object(); const auto res = m_lexer.get_number_float();
}
else if (JSON_UNLIKELY(not std::isfinite(res)))
{ {
return sax->parse_error(m_lexer.get_position(), return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(), m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object))); out_of_range::create(406, "number overflow parsing '" + m_lexer.get_token_string() + "'"));
}
else
{
if (not sax->number_float(res, m_lexer.move_string()))
{
return false;
}
break;
}
}
case token_type::literal_false:
{
if (not sax->boolean(false))
{
return false;
}
break;
} }
case token_type::literal_null:
{
if (not sax->null())
{
return false;
} }
break;
} }
case token_type::begin_array: case token_type::literal_true:
{ {
if (not sax->start_array()) if (not sax->boolean(true))
{ {
return false; return false;
} }
break;
}
// read next token case token_type::value_integer:
get_token(); {
if (not sax->number_integer(m_lexer.get_number_integer()))
{
return false;
}
break;
}
// closing ] -> we are done case token_type::value_string:
if (last_token == token_type::end_array)
{ {
return sax->end_array(); if (not sax->string(m_lexer.move_string()))
{
return false;
}
break;
} }
// parse values case token_type::value_unsigned:
while (true)
{ {
// parse value if (not sax->number_unsigned(m_lexer.get_number_unsigned()))
if (not sax_parse_internal(sax))
{ {
return false; return false;
} }
break;
}
// comma -> next value case token_type::parse_error:
{
// using "uninitialized" to avoid "expected" message
return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::uninitialized)));
}
default: // the last token was unexpected
{
return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::literal_or_value)));
}
}
}
else
{
skip_to_tail = false;
}
// we reached this line after we successfully parsed a value
if (states.empty())
{
// empty stack: we reached the end of the hierarchy: done
return true;
}
else
{
get_token(); get_token();
switch (states.back())
{
case parse_state_t::array_value:
{
// comma -> next value
if (last_token == token_type::value_separator) if (last_token == token_type::value_separator)
{ {
// parse a new value
get_token(); get_token();
continue; continue;
} }
...@@ -4052,7 +4149,20 @@ class parser ...@@ -4052,7 +4149,20 @@ class parser
// closing ] // closing ]
if (JSON_LIKELY(last_token == token_type::end_array)) if (JSON_LIKELY(last_token == token_type::end_array))
{ {
return sax->end_array(); if (not sax->end_array())
{
return false;
}
// We are done with this array. Before we can parse
// a new value, we need to evaluate the new state
// first. By setting skip_to_tail to true, we skip
// the parsing switch on the next iteration and
// jump straight to the tail (state evaluation).
assert(not states.empty());
states.pop_back();
skip_to_tail = true;
continue;
} }
else else
{ {
...@@ -4061,67 +4171,69 @@ class parser ...@@ -4061,67 +4171,69 @@ class parser
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array))); parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_array)));
} }
} }
}
case token_type::value_float: case parse_state_t::object_value:
{ {
const auto res = m_lexer.get_number_float(); // comma -> next value
if (last_token == token_type::value_separator)
{
get_token();
if (JSON_UNLIKELY(not std::isfinite(res))) // parse key
if (JSON_UNLIKELY(last_token != token_type::value_string))
{ {
return sax->parse_error(m_lexer.get_position(), return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(), m_lexer.get_token_string(),
out_of_range::create(406, "number overflow parsing '" + m_lexer.get_token_string() + "'")); parse_error::create(101, m_lexer.get_position(), exception_message(token_type::value_string)));
} }
else else
{ {
return sax->number_float(res, m_lexer.move_string()); if (not sax->key(m_lexer.move_string()))
}
}
case token_type::literal_false:
{ {
return sax->boolean(false); return false;
} }
case token_type::literal_null:
{
return sax->null();
} }
case token_type::literal_true: // parse separator (:)
get_token();
if (JSON_UNLIKELY(last_token != token_type::name_separator))
{ {
return sax->boolean(true); return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::name_separator)));
} }
case token_type::value_integer: // parse values
{ get_token();
return sax->number_integer(m_lexer.get_number_integer()); continue;
} }
case token_type::value_string: // closing }
if (JSON_LIKELY(last_token == token_type::end_object))
{ {
return sax->string(m_lexer.move_string()); if (not sax->end_object())
}
case token_type::value_unsigned:
{ {
return sax->number_unsigned(m_lexer.get_number_unsigned()); return false;
} }
case token_type::parse_error: // We are done with this object. Before we can
{ // parse a new value, we need to evaluate the new
// using "uninitialized" to avoid "expected" message // state first. By setting skip_to_tail to true,
return sax->parse_error(m_lexer.get_position(), // we are effectively jumping to the beginning of
m_lexer.get_token_string(), // this switch.
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::uninitialized))); assert(not states.empty());
states.pop_back();
skip_to_tail = true;
continue;
} }
else
default: // the last token was unexpected
{ {
return sax->parse_error(m_lexer.get_position(), return sax->parse_error(m_lexer.get_position(),
m_lexer.get_token_string(), m_lexer.get_token_string(),
parse_error::create(101, m_lexer.get_position(), exception_message(token_type::literal_or_value))); parse_error::create(101, m_lexer.get_position(), exception_message(token_type::end_object)));
}
}
}
} }
} }
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.