Skip to content

Commit

Permalink
Unify tests for tokenizer
Browse files Browse the repository at this point in the history
  • Loading branch information
davesnx committed Jul 15, 2024
1 parent c77413d commit 2fd57cd
Show file tree
Hide file tree
Showing 5 changed files with 161 additions and 167 deletions.
4 changes: 3 additions & 1 deletion packages/css-property-parser/lib/Parser.re
Original file line number Diff line number Diff line change
Expand Up @@ -830,7 +830,9 @@ and property_animation_fill_mode = [%value.rec
and property_animation_iteration_count = [%value.rec
"[ <single-animation-iteration-count> ]#"
]
and property_animation_name = [%value.rec "[ <keyframes-name> | 'none' | <interpolation> ]#"]
and property_animation_name = [%value.rec
"[ <keyframes-name> | 'none' | <interpolation> ]#"
]
and property_animation_play_state = [%value.rec
"[ <single-animation-play-state> ]#"
]
Expand Down
2 changes: 1 addition & 1 deletion packages/parser/lib/Ast.re
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
/* We have records with identical field names. Type inference can sometimes struggle to determine which field you're referring to. This ambiguity can lead to compiler warnings or errors, we disable it with -30 because we always construct this with annotations in those cases. */
/* We have records with identical field names. Type inference can sometimes struggle to determine which field you're referring to. This ambiguity can lead to compiler warnings or errors, we disable it with -30 because we always construct this with annotations. */
[@warning "-30"];

[@deriving show({with_path: false})]
Expand Down
157 changes: 156 additions & 1 deletion packages/parser/test/Lexer_test.re
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
open Alcotest;

module Tokens = Styled_ppx_css_parser.Tokens;
module Lexer = Styled_ppx_css_parser.Lexer;
module Parser = Styled_ppx_css_parser.Parser;

Expand Down Expand Up @@ -158,4 +159,158 @@ let error_tests =
);
});

let tests = List.append(success_tests, error_tests);
/* Lex [input] and return the location of the first token together with
   the bare token payloads (locations stripped). Fails loudly if the lexer
   reports the `Frozen error. Assumes the lexer always yields at least one
   token (EOF) — TODO confirm against Lexer.from_string. */
let parse = input => {
  let tokens =
    switch (Lexer.from_string(input)) {
    | Ok(tokens) => tokens
    | Error(`Frozen) => failwith("Lexer got frozen")
    };

  /* Location of the first lexed token; used by callers to check positions. */
  let Lexer.{loc, _} = List.hd(tokens);
  (loc, List.map((Lexer.{txt, _}) => txt, tokens));
};

/* Pretty-print a single token. EOF renders as the empty string so it
   disappears after String.trim in the joined output. */
let render_token = token =>
  switch (token) {
  | Tokens.EOF => ""
  | t => Tokens.show_token(t)
  };

/* Render a list of lexer results (tokens or (token, error) pairs) as a
   single space-separated string. The list is reversed first because the
   lexer accumulates results in reverse order — TODO confirm with Lexer. */
let list_parse_tokens_to_string = tokens => {
  let render_result = result =>
    switch (result) {
    | Ok(token) => render_token(token)
    | Error((token, err)) =>
      "Error(" ++ Tokens.show_error(err) ++ ") " ++ Tokens.show_token(token)
    };

  tokens
  |> List.rev
  |> List.map(render_result)
  |> String.concat(" ")
  |> String.trim;
};

/* Render a plain token list as a space-separated string (no error cases). */
let list_tokens_to_string = tokens =>
  String.trim(String.concat(" ", List.map(render_token, tokens)));

/* Table-driven lexer tests: each row is (input, expected tokens, expected
   end position). Every row checks both the rendered token stream and the
   final cursor position (loc_end.pos_cnum) reported by the lexer. */
let test_with_location =
  [
    ({||}, [EOF], 0),
    (" \n\t ", [Tokens.WS], 4),
    ({|"something"|}, [STRING("something")], 11),
    // TODO: is that right?
    ({|#2|}, [HASH("2", `UNRESTRICTED)], 2),
    ({|#abc|}, [HASH("abc", `ID)], 4),
    ({|#|}, [DELIM("#")], 1),
    ({|'tuturu'|}, [STRING("tuturu")], 8),
    ({|(|}, [LEFT_PAREN], 1),
    ({|)|}, [RIGHT_PAREN], 1),
    ({|+12.3|}, [NUMBER(12.3)], 5),
    ({|+|}, [DELIM("+")], 1),
    ({|,|}, [COMMA], 1),
    ({|-45.6|}, [NUMBER(-45.6)], 5),
    ({|--potato|}, [IDENT("--potato")], 8),
    ({|-|}, [DELIM("-")], 1),
    ({|.7|}, [NUMBER(0.7)], 2),
    ({|.|}, [DELIM(".")], 1),
    ({|:|}, [COLON], 1),
    ({|;|}, [SEMI_COLON], 1),
    ({|<|}, [DELIM("<")], 1),
    ({|@mayushii|}, [AT_KEYWORD("mayushii")], 9),
    ({|@|}, [DELIM("@")], 1),
    ({|[|}, [LEFT_BRACKET], 1),
    ("\\@desu", [IDENT("@desu")], 6),
    ({|]|}, [RIGHT_BRACKET], 1),
    ({|12345678.9|}, [NUMBER(12345678.9)], 10),
    ({|bar|}, [IDENT("bar")], 3),
    ({|!|}, [DELIM("!")], 1),
    ("1 / 1", [NUMBER(1.), WS, DELIM("/"), WS, NUMBER(1.)], 5),
    (
      {|calc(10px + 10px)|},
      [
        FUNCTION("calc"),
        DIMENSION(10., "px"),
        WS,
        DELIM("+"),
        WS,
        DIMENSION(10., "px"),
        RIGHT_PAREN,
      ],
      17,
    ),
    (
      {|background-image:url('img_tree.gif' )|},
      [
        IDENT("background-image"),
        COLON,
        FUNCTION("url"),
        STRING("img_tree.gif"),
        WS,
        RIGHT_PAREN,
      ],
      37,
    ),
    (
      {|calc(10px+ 10px)|},
      [
        FUNCTION("calc"),
        DIMENSION(10., "px"),
        DELIM("+"),
        WS,
        DIMENSION(10., "px"),
        RIGHT_PAREN,
      ],
      16,
    ),
    ({|calc(10%)|}, [FUNCTION("calc"), PERCENTAGE(10.), RIGHT_PAREN], 9),
    (
      {|$(Module.variable)|},
      [
        DELIM("$"),
        LEFT_PAREN,
        IDENT("Module"),
        DELIM("."),
        IDENT("variable"),
        RIGHT_PAREN,
      ],
      18,
    ),
    (
      {|$(Module.variable')|},
      [
        DELIM("$"),
        LEFT_PAREN,
        IDENT("Module"),
        DELIM("."),
        IDENT("variable'"),
        RIGHT_PAREN,
      ],
      19,
    ),
    ({|--color-main|}, [IDENT("--color-main")], 12),
  ]
  /* List.map instead of List.mapi: the index was unused (_index). */
  |> List.map(((input, output, last_position)) => {
       let assertion = () => {
         /* Lex inside the test case so a lexer crash or a position mismatch
            is reported as a failure of this one test, instead of aborting
            the whole runner while the test list is being built. */
         let (loc, values) = parse(input);

         if (loc.loc_end.pos_cnum != last_position) {
           Alcotest.fail(
             "position should be "
             ++ string_of_int(last_position)
             ++ " received "
             ++ string_of_int(loc.loc_end.pos_cnum),
           );
         };

         Alcotest.check(
           Alcotest.string,
           "should succeed lexing: " ++ input,
           list_parse_tokens_to_string(values),
           list_tokens_to_string(output),
         );
       };

       Alcotest.test_case(input, `Quick, assertion);
     });

let tests = success_tests @ error_tests @ test_with_location;
6 changes: 1 addition & 5 deletions packages/parser/test/Runner.re
Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
Alcotest.run(
"Lexer and Parser",
[
("Lexer", Lexer_test.tests),
("Parser", Parser_test.tests),
("Tokenizer", Tokenizer_test.tests),
],
[("Lexer", Lexer_test.tests), ("Parser", Parser_test.tests)],
);
159 changes: 0 additions & 159 deletions packages/parser/test/Tokenizer_test.re

This file was deleted.

0 comments on commit 2fd57cd

Please sign in to comment.