Lexy updated
facontidavide committed Sep 21, 2024
1 parent 68cf55e commit 37f8c73
Showing 123 changed files with 1,989 additions and 783 deletions.
74 changes: 37 additions & 37 deletions 3rdparty/lexy/CMakeLists.txt
@@ -1,4 +1,4 @@
# Copyright (C) 2020-2023 Jonathan Müller and lexy contributors
# Copyright (C) 2020-2024 Jonathan Müller and lexy contributors
# SPDX-License-Identifier: BSL-1.0

cmake_minimum_required(VERSION 3.8)
@@ -9,14 +9,49 @@ option(LEXY_FORCE_CPP17 "Whether or not lexy should use C++17 even if compil

add_subdirectory(src)

option(LEXY_ENABLE_INSTALL "whether or not to enable the install rule" ON)
if(LEXY_ENABLE_INSTALL)
include(CMakePackageConfigHelpers)
include(GNUInstallDirs)

install(TARGETS lexy lexy_core lexy_file lexy_unicode lexy_ext _lexy_base lexy_dev
EXPORT ${PROJECT_NAME}Targets
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR})

install(EXPORT ${PROJECT_NAME}Targets
NAMESPACE foonathan::
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")

configure_package_config_file(
cmake/lexyConfig.cmake.in
"${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake"
INSTALL_DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")
install(FILES "${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake"
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")

# YYYY.MM.N1 is compatible with YYYY.MM.N2.
write_basic_package_version_file(
"${PROJECT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake"
COMPATIBILITY SameMinorVersion)

install(FILES "${PROJECT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake"
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")

install(DIRECTORY include/lexy include/lexy_ext
DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.hpp")
endif()

if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR)
cmake_minimum_required(VERSION 3.18)
option(LEXY_BUILD_BENCHMARKS "whether or not benchmarks should be built" OFF)
option(LEXY_BUILD_EXAMPLES "whether or not examples should be built" ON)
option(LEXY_BUILD_TESTS "whether or not tests should be built" ON)
option(LEXY_BUILD_DOCS "whether or not docs should be built" OFF)
option(LEXY_BUILD_PACKAGE "whether or not the package should be built" ON)
option(LEXY_ENABLE_INSTALL "whether or not to enable the install rule" ON)

if(LEXY_BUILD_PACKAGE)
set(package_files include/ src/ cmake/ CMakeLists.txt LICENSE)
@@ -41,39 +76,4 @@ if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR)
if(LEXY_BUILD_DOCS)
add_subdirectory(docs EXCLUDE_FROM_ALL)
endif()

if(LEXY_ENABLE_INSTALL)
include(CMakePackageConfigHelpers)
include(GNUInstallDirs)

install(TARGETS lexy lexy_core lexy_file lexy_unicode lexy_ext _lexy_base lexy_dev
EXPORT ${PROJECT_NAME}Targets
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR})

install(EXPORT ${PROJECT_NAME}Targets
NAMESPACE foonathan::
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")

configure_package_config_file(
cmake/lexyConfig.cmake.in
"${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake"
INSTALL_DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")
install(FILES "${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake"
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")

# YYYY.MM.N1 is compatible with YYYY.MM.N2.
write_basic_package_version_file(
"${PROJECT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake"
COMPATIBILITY SameMinorVersion)

install(FILES "${PROJECT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake"
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")

install(DIRECTORY include/lexy include/lexy_ext
DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.hpp")
endif()
endif()
5 changes: 3 additions & 2 deletions 3rdparty/lexy/README.adoc
@@ -113,8 +113,9 @@ Why should I use lexy over XYZ?::
http://boost-spirit.com/home/[Boost.Spirit]:::
The main difference: it is not a Boost library.
Otherwise, it is just a different implementation with a different flavor.
Use lexy if you like lexy more.
In addition, Boost.Spirit is quite old and doesn't support e.g. non-common ranges as input.
Boost.Spirit also eagerly creates attributes from the rules, which can lead to nested tuples/variants, while lexy uses callbacks, which enable zero-copy parsing directly into your own data structure.
However, lexy's grammar is more verbose and designed to parse bigger grammars instead of the small one-off rules that Boost.Spirit is good at.
https://github.com/taocpp/PEGTL[PEGTL]:::
PEGTL is very similar and was a big inspiration.
The biggest difference is that lexy uses an operator based DSL instead of inheriting from templated classes as PEGTL does;
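
The callback-based approach described above can be illustrated with a short, hypothetical sketch (not part of this commit): a grammar that parses a pair of integers straight into a user-defined struct. The names used here (lexy::dsl::integer, lexy::dsl::comma, lexy::construct, lexy::zstring_input, lexy::parse, lexy_ext::report_error) follow lexy's documented API as I understand it and may differ between versions.

// Hypothetical sketch, not part of this commit: parse "x,y" directly into a struct.
#include <lexy/action/parse.hpp>       // lexy::parse
#include <lexy/callback.hpp>           // lexy::construct
#include <lexy/dsl.hpp>                // the operator-based rule DSL
#include <lexy/input/string_input.hpp> // lexy::zstring_input
#include <lexy_ext/report_error.hpp>   // lexy_ext::report_error

struct point
{
    int x, y;
};

struct point_production
{
    // Rules are composed with operators instead of inheriting from templated classes.
    static constexpr auto rule
        = lexy::dsl::integer<int> + lexy::dsl::comma + lexy::dsl::integer<int>;

    // The callback forwards the two parsed integers straight into point,
    // without materializing intermediate tuples/variants.
    static constexpr auto value = lexy::construct<point>;
};

int main()
{
    auto input  = lexy::zstring_input("11,42");
    auto result = lexy::parse<point_production>(input, lexy_ext::report_error);
    return result.has_value() && result.value().x == 11 ? 0 : 1;
}
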
2 changes: 1 addition & 1 deletion 3rdparty/lexy/cmake/lexyConfig.cmake.in
@@ -1,4 +1,4 @@
# Copyright (C) 2020-2023 Jonathan Müller and lexy contributors
# Copyright (C) 2020-2024 Jonathan Müller and lexy contributors
# SPDX-License-Identifier: BSL-1.0

# lexy CMake configuration file.
2 changes: 1 addition & 1 deletion 3rdparty/lexy/include/lexy/_detail/any_ref.hpp
@@ -1,4 +1,4 @@
// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors
// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors
// SPDX-License-Identifier: BSL-1.0

#ifndef LEXY_DETAIL_ANY_REF_HPP_INCLUDED
2 changes: 1 addition & 1 deletion 3rdparty/lexy/include/lexy/_detail/assert.hpp
@@ -1,4 +1,4 @@
// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors
// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors
// SPDX-License-Identifier: BSL-1.0

#ifndef LEXY_DETAIL_ASSERT_HPP_INCLUDED
2 changes: 1 addition & 1 deletion 3rdparty/lexy/include/lexy/_detail/buffer_builder.hpp
@@ -1,4 +1,4 @@
// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors
// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors
// SPDX-License-Identifier: BSL-1.0

#ifndef LEXY_DETAIL_BUFFER_BUILDER_HPP_INCLUDED
74 changes: 37 additions & 37 deletions 3rdparty/lexy/include/lexy/_detail/code_point.hpp
@@ -1,4 +1,4 @@
// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors
// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors
// SPDX-License-Identifier: BSL-1.0

#ifndef LEXY_DETAIL_CODE_POINT_HPP_INCLUDED
@@ -133,9 +133,9 @@ enum class cp_error
template <typename Reader>
struct cp_result
{
char32_t cp;
cp_error error;
typename Reader::iterator end;
char32_t cp;
cp_error error;
typename Reader::marker end;
};

template <typename Reader>
@@ -144,16 +144,16 @@ constexpr cp_result<Reader> parse_code_point(Reader reader)
if constexpr (std::is_same_v<typename Reader::encoding, lexy::ascii_encoding>)
{
if (reader.peek() == Reader::encoding::eof())
return {{}, cp_error::eof, reader.position()};
return {{}, cp_error::eof, reader.current()};

auto cur = reader.peek();
reader.bump();

auto cp = static_cast<char32_t>(cur);
if (cp <= 0x7F)
return {cp, cp_error::success, reader.position()};
return {cp, cp_error::success, reader.current()};
else
return {cp, cp_error::out_of_range, reader.position()};
return {cp, cp_error::out_of_range, reader.current()};
}
else if constexpr (std::is_same_v<typename Reader::encoding, lexy::utf8_encoding> //
|| std::is_same_v<typename Reader::encoding, lexy::utf8_char_encoding>)
@@ -176,19 +176,19 @@ constexpr cp_result<Reader> parse_code_point(Reader reader)
{
// ASCII character.
reader.bump();
return {first, cp_error::success, reader.position()};
return {first, cp_error::success, reader.current()};
}
else if ((first & ~payload_cont) == pattern_cont)
{
return {{}, cp_error::leads_with_trailing, reader.position()};
return {{}, cp_error::leads_with_trailing, reader.current()};
}
else if ((first & ~payload_lead2) == pattern_lead2)
{
reader.bump();

auto second = uchar_t(reader.peek());
if ((second & ~payload_cont) != pattern_cont)
return {{}, cp_error::missing_trailing, reader.position()};
return {{}, cp_error::missing_trailing, reader.current()};
reader.bump();

auto result = char32_t(first & payload_lead2);
@@ -197,22 +197,22 @@ constexpr cp_result<Reader> parse_code_point(Reader reader)

// C0 and C1 are overlong ASCII.
if (first == 0xC0 || first == 0xC1)
return {result, cp_error::overlong_sequence, reader.position()};
return {result, cp_error::overlong_sequence, reader.current()};
else
return {result, cp_error::success, reader.position()};
return {result, cp_error::success, reader.current()};
}
else if ((first & ~payload_lead3) == pattern_lead3)
{
reader.bump();

auto second = uchar_t(reader.peek());
if ((second & ~payload_cont) != pattern_cont)
return {{}, cp_error::missing_trailing, reader.position()};
return {{}, cp_error::missing_trailing, reader.current()};
reader.bump();

auto third = uchar_t(reader.peek());
if ((third & ~payload_cont) != pattern_cont)
return {{}, cp_error::missing_trailing, reader.position()};
return {{}, cp_error::missing_trailing, reader.current()};
reader.bump();

auto result = char32_t(first & payload_lead3);
@@ -223,29 +223,29 @@ constexpr cp_result<Reader> parse_code_point(Reader reader)

auto cp = result;
if (0xD800 <= cp && cp <= 0xDFFF)
return {cp, cp_error::surrogate, reader.position()};
return {cp, cp_error::surrogate, reader.current()};
else if (first == 0xE0 && second < 0xA0)
return {cp, cp_error::overlong_sequence, reader.position()};
return {cp, cp_error::overlong_sequence, reader.current()};
else
return {cp, cp_error::success, reader.position()};
return {cp, cp_error::success, reader.current()};
}
else if ((first & ~payload_lead4) == pattern_lead4)
{
reader.bump();

auto second = uchar_t(reader.peek());
if ((second & ~payload_cont) != pattern_cont)
return {{}, cp_error::missing_trailing, reader.position()};
return {{}, cp_error::missing_trailing, reader.current()};
reader.bump();

auto third = uchar_t(reader.peek());
if ((third & ~payload_cont) != pattern_cont)
return {{}, cp_error::missing_trailing, reader.position()};
return {{}, cp_error::missing_trailing, reader.current()};
reader.bump();

auto fourth = uchar_t(reader.peek());
if ((fourth & ~payload_cont) != pattern_cont)
return {{}, cp_error::missing_trailing, reader.position()};
return {{}, cp_error::missing_trailing, reader.current()};
reader.bump();

auto result = char32_t(first & payload_lead4);
@@ -258,15 +258,15 @@ constexpr cp_result<Reader> parse_code_point(Reader reader)

auto cp = result;
if (cp > 0x10'FFFF)
return {cp, cp_error::out_of_range, reader.position()};
return {cp, cp_error::out_of_range, reader.current()};
else if (first == 0xF0 && second < 0x90)
return {cp, cp_error::overlong_sequence, reader.position()};
return {cp, cp_error::overlong_sequence, reader.current()};
else
return {cp, cp_error::success, reader.position()};
return {cp, cp_error::success, reader.current()};
}
else // FE or FF
{
return {{}, cp_error::eof, reader.position()};
return {{}, cp_error::eof, reader.current()};
}
}
else if constexpr (std::is_same_v<typename Reader::encoding, lexy::utf16_encoding>)
@@ -278,53 +278,53 @@ constexpr cp_result<Reader> parse_code_point(Reader reader)
constexpr auto pattern2 = 0b110111 << 10;

if (reader.peek() == Reader::encoding::eof())
return {{}, cp_error::eof, reader.position()};
return {{}, cp_error::eof, reader.current()};

auto first = char16_t(reader.peek());
if ((first & ~payload1) == pattern1)
{
reader.bump();
if (reader.peek() == Reader::encoding::eof())
return {{}, cp_error::missing_trailing, reader.position()};
return {{}, cp_error::missing_trailing, reader.current()};

auto second = char16_t(reader.peek());
if ((second & ~payload2) != pattern2)
return {{}, cp_error::missing_trailing, reader.position()};
return {{}, cp_error::missing_trailing, reader.current()};
reader.bump();

// We've got a valid code point.
auto result = char32_t(first & payload1);
result <<= 10;
result |= char32_t(second & payload2);
result |= 0x10000;
return {result, cp_error::success, reader.position()};
return {result, cp_error::success, reader.current()};
}
else if ((first & ~payload2) == pattern2)
{
return {{}, cp_error::leads_with_trailing, reader.position()};
return {{}, cp_error::leads_with_trailing, reader.current()};
}
else
{
// Single code unit code point; always valid.
reader.bump();
return {first, cp_error::success, reader.position()};
return {first, cp_error::success, reader.current()};
}
}
else if constexpr (std::is_same_v<typename Reader::encoding, lexy::utf32_encoding>)
{
if (reader.peek() == Reader::encoding::eof())
return {{}, cp_error::eof, reader.position()};
return {{}, cp_error::eof, reader.current()};

auto cur = reader.peek();
reader.bump();

auto cp = cur;
if (cp > 0x10'FFFF)
return {cp, cp_error::out_of_range, reader.position()};
return {cp, cp_error::out_of_range, reader.current()};
else if (0xD800 <= cp && cp <= 0xDFFF)
return {cp, cp_error::surrogate, reader.position()};
return {cp, cp_error::surrogate, reader.current()};
else
return {cp, cp_error::success, reader.position()};
return {cp, cp_error::success, reader.current()};
}
else
{
@@ -341,15 +341,15 @@ constexpr void recover_code_point(Reader& reader, cp_result<Reader> result)
{
case cp_error::success:
// Consume the entire code point.
reader.set_position(result.end);
reader.reset(result.end);
break;
case cp_error::eof:
// We don't need to do anything to "recover" from EOF.
break;

case cp_error::leads_with_trailing:
// Invalid code unit, consume to recover.
LEXY_PRECONDITION(result.end == reader.position());
LEXY_PRECONDITION(result.end.position() == reader.position());
reader.bump();
break;

@@ -358,7 +358,7 @@ constexpr void recover_code_point(Reader& reader, cp_result<Reader> result)
case cp_error::out_of_range:
case cp_error::overlong_sequence:
// Consume all the invalid code units to recover.
reader.set_position(result.end);
reader.reset(result.end);
break;
}
}
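
In short, this file's change replaces raw iterator positions with markers: cp_result::end is now typename Reader::marker, produced by reader.current() instead of reader.position(), and consumed by reader.reset(result.end) instead of reader.set_position(result.end), with result.end.position() used where a raw position is still needed. Below is a minimal, hypothetical sketch of how a caller could drive these helpers under the new interface; the lexy::_detail namespace, the include path, and the U+FFFD fallback are assumptions, not part of the diff.

#include <lexy/_detail/code_point.hpp> // internal header, shown here only for illustration

// Sketch: decode one code point using the marker-based interface from this diff.
template <typename Reader>
constexpr char32_t next_code_point(Reader& reader)
{
    // parse_code_point() takes the reader by value, so it only looks ahead;
    // result.end is a marker for where the (possibly invalid) sequence ends.
    auto result = lexy::_detail::parse_code_point(reader);

    // recover_code_point() advances the real reader: on success it consumes the
    // whole code point via reader.reset(result.end); on errors it skips the
    // offending code units.
    lexy::_detail::recover_code_point(reader, result);

    return result.error == lexy::_detail::cp_error::success
               ? result.cp
               : char32_t(0xFFFD); // assumed fallback: U+FFFD replacement character
}
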