Switch from double to long double as token's floating point representation.

There's room for it, since it's in a union with a string_view anyway.

See #202.
tzlaine committed Nov 8, 2024
1 parent 028ad27 commit c1cc177
Showing 2 changed files with 42 additions and 61 deletions.
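The commit message's "room for it" point is easy to sanity-check: the token's value sits in a union, so the union is only as large as its widest member. A minimal sketch, not from the commit, assuming a typical 64-bit ABI where std::string_view is two 8-byte words:

    #include <cstdio>
    #include <string_view>

    // Mirrors the union in the diff below. On x86-64 Linux, long double
    // occupies 16 bytes, the same as std::string_view (pointer + length),
    // so switching d_ from double to long double does not grow the union.
    // On MSVC, long double is 8 bytes, identical to double, so nothing
    // changes there either.
    union value_sketch
    {
        long long ll_;
        long double d_;
        std::string_view sv_;
    };

    int main()
    {
        std::printf(
            "string_view: %zu, long double: %zu, union: %zu\n",
            sizeof(std::string_view),
            sizeof(long double),
            sizeof(value_sketch));
    }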
49 changes: 15 additions & 34 deletions include/boost/parser/lexer.hpp
@@ -50,8 +50,7 @@ namespace boost { namespace parser {
 struct none;
 
 namespace detail {
-// TODO: double -> long double.
-enum class token_kind { no_value, string_view, long_long, double_ };
+enum class token_kind { no_value, string_view, long_long, long_double };
 
 enum class token_parsed_type {
 ws,
@@ -165,8 +164,8 @@ namespace boost { namespace parser {
 {
 value_.ll_ = value;
 }
-constexpr token(int id, double value) :
-value_(0ll), id_(id), kind_(detail::token_kind::double_)
+constexpr token(int id, long double value) :
+value_(0ll), id_(id), kind_(detail::token_kind::long_double)
 {
 value_.d_ = value;
 }
@@ -193,13 +192,13 @@ namespace boost { namespace parser {
 return value_.ll_;
 }
 
-constexpr bool has_double() const
+constexpr bool has_long_double() const
 {
-return kind_ == detail::token_kind::double_;
+return kind_ == detail::token_kind::long_double;
 }
-constexpr double get_double() const
+constexpr long double get_long_double() const
 {
-BOOST_PARSER_DEBUG_ASSERT(kind_ == detail::token_kind::double_);
+BOOST_PARSER_DEBUG_ASSERT(kind_ == detail::token_kind::long_double);
 return value_.d_;
 }

@@ -215,8 +214,8 @@ namespace boost { namespace parser {
 return get_string_view() == rhs.get_string_view();
 case detail::token_kind::long_long:
 return get_long_long() == rhs.get_long_long();
-case detail::token_kind::double_:
-return get_double() == rhs.get_double();
+case detail::token_kind::long_double:
+return get_long_double() == rhs.get_long_double();
 default: BOOST_PARSER_DEBUG_ASSERT(!"Error: invalid token kind.");
 #if defined(__cpp_lib_unreachable)
 std::unreachable();
@@ -235,7 +234,7 @@ namespace boost { namespace parser {
 union value
 {
 long long ll_;
-double d_;
+long double d_;
 string_view sv_;
 } value_;
 int id_;
@@ -255,8 +254,8 @@ namespace boost { namespace parser {
 } else {
 os << token.get_long_long();
 }
-} else if (token.has_double()) {
-os << token.get_double();
+} else if (token.has_long_double()) {
+os << token.get_long_double();
 } else {
 os << "{no-value}";
 }
@@ -336,24 +335,6 @@ namespace boost { namespace parser {
 }
 }
 
-template<typename T>
-token_kind token_kind_for()
-{
-if constexpr (std::is_same_v<T, none>) {
-return token_kind::string_view;
-} else if constexpr (std::is_same_v<T, long long>) {
-return token_kind::long_long;
-} else if constexpr (std::is_same_v<T, double>) {
-return token_kind::double_;
-} else {
-static_assert(
-!std::is_same_v<T, T>,
-"The only valid types for the 'Value' template parameter "
-"to 'lexer_token_spec' are 'none', 'long long', and "
-"'double'.");
-}
-}
-
 template<auto Ch, auto... Chs>
 struct token_chars_spec
 {
@@ -731,23 +712,23 @@ namespace boost { namespace parser {
 wrapper<decltype(value)>{},
 0,
 numeric::parse_real(f, l, value));
-return {id, (double)value};
+return {id, (long double)value};
 }
 case token_parsed_type::double_: {
 double value;
 report_error(
 wrapper<decltype(value)>{},
 0,
 numeric::parse_real(f, l, value));
-return {id, (double)value};
+return {id, (long double)value};
 }
 case token_parsed_type::long_double: {
 long double value;
 report_error(
 wrapper<decltype(value)>{},
 0,
 numeric::parse_real(f, l, value));
-return {id, (double)value};
+return {id, value};
 }
 case token_parsed_type::ws:
 default:
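Taken together, the header changes make long double the single stored representation for floating-point tokens: the lexer still parses float, double, and long double inputs, but as the last hunk above shows, all three now widen into the same long double slot. Below is a self-contained sketch of that tagged-union pattern; the names follow the diff, but the class is a trimmed, hypothetical stand-in, not the library's full token interface.

    #include <cassert>
    #include <string_view>

    enum class token_kind { no_value, string_view, long_long, long_double };

    // Trimmed stand-in for token: an id, a kind tag, and a union whose
    // active member the tag tracks. Only the long double path is shown.
    struct token
    {
        token(int id, long double value) :
            value_{0}, id_(id), kind_(token_kind::long_double)
        {
            value_.d_ = value;  // assignment makes d_ the active member
        }

        bool has_long_double() const
        {
            return kind_ == token_kind::long_double;
        }
        long double get_long_double() const
        {
            // Stand-in for BOOST_PARSER_DEBUG_ASSERT in the real header.
            assert(kind_ == token_kind::long_double);
            return value_.d_;
        }

        union value
        {
            long long ll_;
            long double d_;
            std::string_view sv_;
        } value_;
        int id_;
        token_kind kind_;
    };

    int main()
    {
        // Mirrors `return {id, (long double)value};`: a double parse
        // result widens losslessly into the long double slot.
        token t(7, (long double)0.5);
        assert(t.has_long_double());
        assert(t.get_long_double() == 0.5L);
    }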
54 changes: 27 additions & 27 deletions test/lexer.cpp
@@ -443,7 +443,7 @@ sheet alert_dialog
 tok_t(bp::character_id, (long long)'{'),
 tok_t((int)adobe_tokens::identifier, "dummy_value"),
 tok_t(bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 42.0),
+tok_t((int)adobe_tokens::number, (long double)42.0),
 tok_t(bp::character_id, (long long)'}'),
 tok_t(bp::character_id, (long long)';'),
 tok_t(bp::character_id, (long long)'}')};
@@ -640,15 +640,15 @@ sheet image_size
 tok_t((int)bp::character_id, (long long)':'),
 tok_t((int)adobe_tokens::identifier, "original_width"),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 1600.0),
+tok_t((int)adobe_tokens::number, (long double)1600.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "original_height"),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 1200.0),
+tok_t((int)adobe_tokens::number, (long double)1200.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "original_resolution"),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 300.0),
+tok_t((int)adobe_tokens::number, (long double)300.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "constant"),
 tok_t((int)bp::character_id, (long long)':'),
@@ -714,13 +714,13 @@ sheet image_size
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "dim_width_percent"),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)adobe_tokens::define, "<=="),
 tok_t((int)adobe_tokens::identifier, "resample"),
 tok_t((int)bp::character_id, (long long)'?'),
 tok_t((int)adobe_tokens::identifier, "dim_width_percent"),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "dim_height_pixels"),
 tok_t((int)bp::character_id, (long long)':'),
@@ -737,21 +737,21 @@ sheet image_size
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "dim_height_percent"),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)adobe_tokens::define, "<=="),
 tok_t((int)adobe_tokens::identifier, "resample"),
 tok_t((int)bp::character_id, (long long)'?'),
 tok_t((int)adobe_tokens::identifier, "dim_height_percent"),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "doc_width_inches"),
 tok_t((int)bp::character_id, (long long)':'),
 tok_t((int)adobe_tokens::identifier, "original_doc_width"),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "doc_width_percent"),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::lead_comment, R"(/*
 Resolution must be initialized before width and height inches to allow proportions
@@ -767,7 +767,7 @@ sheet image_size
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "doc_height_percent"),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "auto_quality"),
 tok_t((int)bp::character_id, (long long)':'),
@@ -789,13 +789,13 @@ sheet image_size
 tok_t((int)adobe_tokens::mul_op, "*"),
 tok_t((int)adobe_tokens::identifier, "original_doc_width"),
 tok_t((int)adobe_tokens::mul_op, "/"),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "doc_width_percent"),
 tok_t((int)adobe_tokens::define, "<=="),
 tok_t((int)adobe_tokens::identifier, "doc_width_inches"),
 tok_t((int)adobe_tokens::mul_op, "*"),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)adobe_tokens::mul_op, "/"),
 tok_t((int)adobe_tokens::identifier, "original_doc_width"),
 tok_t((int)bp::character_id, (long long)';'),
@@ -808,13 +808,13 @@ sheet image_size
 tok_t((int)adobe_tokens::mul_op, "*"),
 tok_t((int)adobe_tokens::identifier, "original_doc_height"),
 tok_t((int)adobe_tokens::mul_op, "/"),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "doc_height_percent"),
 tok_t((int)adobe_tokens::define, "<=="),
 tok_t((int)adobe_tokens::identifier, "doc_height_inches"),
 tok_t((int)adobe_tokens::mul_op, "*"),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)adobe_tokens::mul_op, "/"),
 tok_t((int)adobe_tokens::identifier, "original_doc_height"),
 tok_t((int)bp::character_id, (long long)';'),
@@ -831,17 +831,17 @@ sheet image_size
 tok_t((int)bp::character_id, (long long)'@'),
 tok_t((int)adobe_tokens::identifier, "draft"),
 tok_t((int)bp::character_id, (long long)'?'),
-tok_t((int)adobe_tokens::number, 1.0),
+tok_t((int)adobe_tokens::number, (long double)1.0),
 tok_t((int)bp::character_id, (long long)':'),
 tok_t((int)bp::character_id, (long long)'('),
 tok_t((int)adobe_tokens::identifier, "auto_quality"),
 tok_t((int)adobe_tokens::eq_op, "=="),
 tok_t((int)bp::character_id, (long long)'@'),
 tok_t((int)adobe_tokens::identifier, "good"),
 tok_t((int)bp::character_id, (long long)'?'),
-tok_t((int)adobe_tokens::number, 1.5),
+tok_t((int)adobe_tokens::number, (long double)1.5),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 2.0),
+tok_t((int)adobe_tokens::number, (long double)2.0),
 tok_t((int)bp::character_id, (long long)')'),
 tok_t((int)bp::character_id, (long long)')'),
 tok_t((int)bp::character_id, (long long)';'),
@@ -855,17 +855,17 @@ sheet image_size
 tok_t((int)bp::character_id, (long long)'@'),
 tok_t((int)adobe_tokens::identifier, "draft"),
 tok_t((int)bp::character_id, (long long)'?'),
-tok_t((int)adobe_tokens::number, 1.0),
+tok_t((int)adobe_tokens::number, (long double)1.0),
 tok_t((int)bp::character_id, (long long)':'),
 tok_t((int)bp::character_id, (long long)'('),
 tok_t((int)adobe_tokens::identifier, "auto_quality"),
 tok_t((int)adobe_tokens::eq_op, "=="),
 tok_t((int)bp::character_id, (long long)'@'),
 tok_t((int)adobe_tokens::identifier, "good"),
 tok_t((int)bp::character_id, (long long)'?'),
-tok_t((int)adobe_tokens::number, 1.5),
+tok_t((int)adobe_tokens::number, (long double)1.5),
 tok_t((int)bp::character_id, (long long)':'),
-tok_t((int)adobe_tokens::number, 2.0),
+tok_t((int)adobe_tokens::number, (long double)2.0),
 tok_t((int)bp::character_id, (long long)')'),
 tok_t((int)bp::character_id, (long long)')'),
 tok_t((int)bp::character_id, (long long)';'),
@@ -882,13 +882,13 @@ sheet image_size
 tok_t((int)adobe_tokens::mul_op, "*"),
 tok_t((int)adobe_tokens::identifier, "original_width"),
 tok_t((int)adobe_tokens::mul_op, "/"),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "dim_width_percent"),
 tok_t((int)adobe_tokens::define, "<=="),
 tok_t((int)adobe_tokens::identifier, "dim_width_pixels"),
 tok_t((int)adobe_tokens::mul_op, "*"),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)adobe_tokens::mul_op, "/"),
 tok_t((int)adobe_tokens::identifier, "original_width"),
 tok_t((int)bp::character_id, (long long)';'),
@@ -905,13 +905,13 @@ sheet image_size
 tok_t((int)adobe_tokens::mul_op, "*"),
 tok_t((int)adobe_tokens::identifier, "original_height"),
 tok_t((int)adobe_tokens::mul_op, "/"),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "dim_height_percent"),
 tok_t((int)adobe_tokens::define, "<=="),
 tok_t((int)adobe_tokens::identifier, "dim_height_pixels"),
 tok_t((int)adobe_tokens::mul_op, "*"),
-tok_t((int)adobe_tokens::number, 100.0),
+tok_t((int)adobe_tokens::number, (long double)100.0),
 tok_t((int)adobe_tokens::mul_op, "/"),
 tok_t((int)adobe_tokens::identifier, "original_height"),
 tok_t((int)bp::character_id, (long long)';'),
@@ -1031,7 +1031,7 @@ sheet image_size
 tok_t((int)adobe_tokens::mul_op, "*"),
 tok_t((int)adobe_tokens::identifier, "dim_height_pixels"),
 tok_t((int)adobe_tokens::mul_op, "*"),
-tok_t((int)adobe_tokens::number, 32.0),
+tok_t((int)adobe_tokens::number, (long double)32.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "result"),
 tok_t((int)adobe_tokens::define, "<=="),
@@ -1082,14 +1082,14 @@ sheet image_size
 tok_t((int)adobe_tokens::identifier, "dim_width_pixels"),
 tok_t((int)adobe_tokens::rel_op, "<"),
 tok_t((int)bp::character_id, (long long)'='),
-tok_t((int)adobe_tokens::number, 300000.0),
+tok_t((int)adobe_tokens::number, (long double)300000.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)adobe_tokens::identifier, "height_max"),
 tok_t((int)adobe_tokens::define, "<=="),
 tok_t((int)adobe_tokens::identifier, "dim_height_pixels"),
 tok_t((int)adobe_tokens::rel_op, "<"),
 tok_t((int)bp::character_id, (long long)'='),
-tok_t((int)adobe_tokens::number, 300000.0),
+tok_t((int)adobe_tokens::number, (long double)300000.0),
 tok_t((int)bp::character_id, (long long)';'),
 tok_t((int)bp::character_id, (long long)'}')};
