Unverified Commit 08571408 by Niels Lohmann Committed by GitHub

Merge pull request #1950 from FrancoisChabot/issues/1457

templated input adapters
parents d7b032f5 a4266bbb
...@@ -24,6 +24,20 @@ namespace nlohmann ...@@ -24,6 +24,20 @@ namespace nlohmann
{ {
namespace detail namespace detail
{ {
/*!
@brief determine system byte order
@return true if and only if system's byte order is little endian
@note from https://stackoverflow.com/a/1001328/266378
*/
static bool little_endianess(int num = 1) noexcept
{
    // Inspect the first byte of an int holding 1: on a little-endian
    // machine the least significant byte comes first, so it reads as 1.
    const char* first_byte = reinterpret_cast<const char*>(&num);
    return *first_byte == 1;
}
/////////////////// ///////////////////
// binary reader // // binary reader //
/////////////////// ///////////////////
...@@ -31,7 +45,7 @@ namespace detail ...@@ -31,7 +45,7 @@ namespace detail
/*! /*!
@brief deserialization of CBOR, MessagePack, and UBJSON values @brief deserialization of CBOR, MessagePack, and UBJSON values
*/ */
template<typename BasicJsonType, typename SAX = json_sax_dom_parser<BasicJsonType>> template<typename BasicJsonType, typename InputAdapterType, typename SAX = json_sax_dom_parser<BasicJsonType>>
class binary_reader class binary_reader
{ {
using number_integer_t = typename BasicJsonType::number_integer_t; using number_integer_t = typename BasicJsonType::number_integer_t;
...@@ -47,10 +61,9 @@ class binary_reader ...@@ -47,10 +61,9 @@ class binary_reader
@param[in] adapter input adapter to read from @param[in] adapter input adapter to read from
*/ */
explicit binary_reader(input_adapter_t adapter) : ia(std::move(adapter)) explicit binary_reader(InputAdapterType&& adapter) : ia(std::move(adapter))
{ {
(void)detail::is_sax_static_asserts<SAX, BasicJsonType> {}; (void)detail::is_sax_static_asserts<SAX, BasicJsonType> {};
assert(ia);
} }
// make class move-only // make class move-only
...@@ -119,18 +132,6 @@ class binary_reader ...@@ -119,18 +132,6 @@ class binary_reader
return result; return result;
} }
/*!
@brief determine system byte order
@return true if and only if system's byte order is little endian
@note from https://stackoverflow.com/a/1001328/266378
*/
static constexpr bool little_endianess(int num = 1) noexcept
{
return *reinterpret_cast<char*>(&num) == 1;
}
private: private:
////////// //////////
// BSON // // BSON //
...@@ -2085,7 +2086,7 @@ class binary_reader ...@@ -2085,7 +2086,7 @@ class binary_reader
int get() int get()
{ {
++chars_read; ++chars_read;
return current = ia->get_character(); return current = ia.get_character();
} }
/*! /*!
...@@ -2273,7 +2274,7 @@ class binary_reader ...@@ -2273,7 +2274,7 @@ class binary_reader
private: private:
/// input adapter /// input adapter
input_adapter_t ia = nullptr; InputAdapterType ia;
/// the current character /// the current character
int current = std::char_traits<char>::eof(); int current = std::char_traits<char>::eof();
......
...@@ -22,19 +22,9 @@ namespace detail ...@@ -22,19 +22,9 @@ namespace detail
// lexer // // lexer //
/////////// ///////////
/*!
@brief lexical analysis
This class organizes the lexical analysis during JSON deserialization.
*/
template<typename BasicJsonType> template<typename BasicJsonType>
class lexer class lexer_base
{ {
using number_integer_t = typename BasicJsonType::number_integer_t;
using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
using number_float_t = typename BasicJsonType::number_float_t;
using string_t = typename BasicJsonType::string_t;
public: public:
/// token types for the parser /// token types for the parser
enum class token_type enum class token_type
...@@ -75,9 +65,9 @@ class lexer ...@@ -75,9 +65,9 @@ class lexer
return "null literal"; return "null literal";
case token_type::value_string: case token_type::value_string:
return "string literal"; return "string literal";
case lexer::token_type::value_unsigned: case token_type::value_unsigned:
case lexer::token_type::value_integer: case token_type::value_integer:
case lexer::token_type::value_float: case token_type::value_float:
return "number literal"; return "number literal";
case token_type::begin_array: case token_type::begin_array:
return "'['"; return "'['";
...@@ -103,15 +93,31 @@ class lexer ...@@ -103,15 +93,31 @@ class lexer
// LCOV_EXCL_STOP // LCOV_EXCL_STOP
} }
} }
};
/*!
@brief lexical analysis
This class organizes the lexical analysis during JSON deserialization.
*/
template<typename BasicJsonType, typename InputAdapterType>
class lexer : public lexer_base<BasicJsonType>
{
using number_integer_t = typename BasicJsonType::number_integer_t;
using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
using number_float_t = typename BasicJsonType::number_float_t;
using string_t = typename BasicJsonType::string_t;
public:
using token_type = typename lexer_base<BasicJsonType>::token_type;
explicit lexer(detail::input_adapter_t&& adapter) explicit lexer(InputAdapterType&& adapter)
: ia(std::move(adapter)), decimal_point_char(get_decimal_point()) {} : ia(std::move(adapter)), decimal_point_char(get_decimal_point()) {}
// delete because of pointer members // delete because of pointer members
lexer(const lexer&) = delete; lexer(const lexer&) = delete;
lexer(lexer&&) = delete; lexer(lexer&&) = default;
lexer& operator=(lexer&) = delete; lexer& operator=(lexer&) = delete;
lexer& operator=(lexer&&) = delete; lexer& operator=(lexer&&) = default;
~lexer() = default; ~lexer() = default;
private: private:
...@@ -1256,7 +1262,7 @@ scan_number_done: ...@@ -1256,7 +1262,7 @@ scan_number_done:
} }
else else
{ {
current = ia->get_character(); current = ia.get_character();
} }
if (JSON_HEDLEY_LIKELY(current != std::char_traits<char>::eof())) if (JSON_HEDLEY_LIKELY(current != std::char_traits<char>::eof()))
...@@ -1480,7 +1486,7 @@ scan_number_done: ...@@ -1480,7 +1486,7 @@ scan_number_done:
private: private:
/// input adapter /// input adapter
detail::input_adapter_t ia = nullptr; InputAdapterType ia;
/// the current character /// the current character
std::char_traits<char>::int_type current = std::char_traits<char>::eof(); std::char_traits<char>::int_type current = std::char_traits<char>::eof();
......
...@@ -24,44 +24,45 @@ namespace detail ...@@ -24,44 +24,45 @@ namespace detail
// parser // // parser //
//////////// ////////////
/// parser events for the parse callback (SAX-like notification of structure)
enum class parse_event_t : uint8_t
{
    /// the parser read `{` and started to process a JSON object
    object_start,
    /// the parser read `}` and finished processing a JSON object
    object_end,
    /// the parser read `[` and started to process a JSON array
    array_start,
    /// the parser read `]` and finished processing a JSON array
    array_end,
    /// the parser read a key of a value in an object
    key,
    /// the parser finished reading a JSON value
    value
};

/// callback invoked by the parser on each event; `depth` is the current
/// nesting level and `parsed` the value built so far — presumably returning
/// false discards the current value (TODO confirm against parser internals,
/// which are outside this view)
template<typename BasicJsonType>
using parser_callback_t =
    std::function<bool(int depth, parse_event_t event, BasicJsonType& parsed)>;
/*! /*!
@brief syntax analysis @brief syntax analysis
This class implements a recursive descent parser. This class implements a recursive descent parser.
*/ */
template<typename BasicJsonType> template<typename BasicJsonType, typename InputAdapterType>
class parser class parser
{ {
using number_integer_t = typename BasicJsonType::number_integer_t; using number_integer_t = typename BasicJsonType::number_integer_t;
using number_unsigned_t = typename BasicJsonType::number_unsigned_t; using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
using number_float_t = typename BasicJsonType::number_float_t; using number_float_t = typename BasicJsonType::number_float_t;
using string_t = typename BasicJsonType::string_t; using string_t = typename BasicJsonType::string_t;
using lexer_t = lexer<BasicJsonType>; using lexer_t = lexer<BasicJsonType, InputAdapterType>;
using token_type = typename lexer_t::token_type; using token_type = typename lexer_t::token_type;
public: public:
enum class parse_event_t : uint8_t
{
/// the parser read `{` and started to process a JSON object
object_start,
/// the parser read `}` and finished processing a JSON object
object_end,
/// the parser read `[` and started to process a JSON array
array_start,
/// the parser read `]` and finished processing a JSON array
array_end,
/// the parser read a key of a value in an object
key,
/// the parser finished reading a JSON value
value
};
using parser_callback_t =
std::function<bool(int depth, parse_event_t event, BasicJsonType& parsed)>;
/// a parser reading from an input adapter /// a parser reading from an input adapter
explicit parser(detail::input_adapter_t&& adapter, explicit parser(InputAdapterType&& adapter,
const parser_callback_t cb = nullptr, const parser_callback_t<BasicJsonType> cb = nullptr,
const bool allow_exceptions_ = true) const bool allow_exceptions_ = true)
: callback(cb), m_lexer(std::move(adapter)), allow_exceptions(allow_exceptions_) : callback(cb), m_lexer(std::move(adapter)), allow_exceptions(allow_exceptions_)
{ {
...@@ -486,7 +487,7 @@ class parser ...@@ -486,7 +487,7 @@ class parser
private: private:
/// callback function /// callback function
const parser_callback_t callback = nullptr; const parser_callback_t<BasicJsonType> callback = nullptr;
/// the type of the last read token /// the type of the last read token
token_type last_token = token_type::uninitialized; token_type last_token = token_type::uninitialized;
/// the lexer /// the lexer
......
...@@ -1538,7 +1538,7 @@ class binary_writer ...@@ -1538,7 +1538,7 @@ class binary_writer
private: private:
/// whether we can assume little endianess /// whether we can assume little endianess
const bool is_little_endian = binary_reader<BasicJsonType>::little_endianess(); const bool is_little_endian = little_endianess();
/// the output /// the output
output_adapter_t<CharType> oa = nullptr; output_adapter_t<CharType> oa = nullptr;
......
...@@ -40,7 +40,8 @@ namespace ...@@ -40,7 +40,8 @@ namespace
json::lexer::token_type scan_string(const char* s); json::lexer::token_type scan_string(const char* s);
json::lexer::token_type scan_string(const char* s) json::lexer::token_type scan_string(const char* s)
{ {
return json::lexer(nlohmann::detail::input_adapter(s)).scan(); auto ia = nlohmann::detail::input_adapter(s);
return nlohmann::detail::lexer<json, decltype(ia)>(std::move(ia)).scan();
} }
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment