From 7f2fb16afbec1c9739968d2fb8e26e3903ee62a9 Mon Sep 17 00:00:00 2001 From: Jamie Willis Date: Mon, 25 Apr 2022 21:03:15 +0100 Subject: [PATCH] Fixed infinite loop in whitespace with no comments in `LanguageDef` (#119) * Fixed bug where whitespace with no comments does not advance program counter * Added unit test --- .../internal/machine/instructions/TokenInstrs.scala | 9 +++++++-- src/test/scala/parsley/TokeniserTests.scala | 6 ++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/src/main/scala/parsley/internal/machine/instructions/TokenInstrs.scala b/src/main/scala/parsley/internal/machine/instructions/TokenInstrs.scala index 2519426a8..ace671941 100644 --- a/src/main/scala/parsley/internal/machine/instructions/TokenInstrs.scala +++ b/src/main/scala/parsley/internal/machine/instructions/TokenInstrs.scala @@ -77,8 +77,13 @@ private [instructions] abstract class WhiteSpaceLike(start: String, end: String, else ctx.pushAndContinue(()) } + final def spacesAndContinue(ctx: Context): Unit = { + spaces(ctx) + ctx.pushAndContinue(()) + } + private final val impl = { - if (!lineAllowed && !multiAllowed) spaces(_) + if (!lineAllowed && !multiAllowed) spacesAndContinue(_) else if (!lineAllowed) multisOnly(_) else if (!multiAllowed) singlesOnly(_) else singlesAndMultis(_) @@ -235,4 +240,4 @@ private [internal] final class TokenMaxOp(operator: String, _ops: Set[String]) e // $COVERAGE-OFF$ override def toString: String = s"TokenMaxOp(${operator})" // $COVERAGE-ON$ -} \ No newline at end of file +} diff --git a/src/test/scala/parsley/TokeniserTests.scala b/src/test/scala/parsley/TokeniserTests.scala index 8d05b206a..ff1d22c2d 100644 --- a/src/test/scala/parsley/TokeniserTests.scala +++ b/src/test/scala/parsley/TokeniserTests.scala @@ -397,4 +397,10 @@ class TokeniserTests extends ParsleyTest { p.parse("bye") shouldBe a [Success[_]] p.parse("Bye") shouldBe a [Success[_]] } + + "issue #119" should "not regress: whitespace should work without comments defined" 
in { + val lang = token.LanguageDef.plain.copy(space = token.Predicate(_.isWhitespace)) + val lexer = new token.Lexer(lang) + lexer.whiteSpace.parse("[") shouldBe a [Success[_]] + } }