diff --git a/test-files/src/lib.rs b/test-files/src/lib.rs
index d1e00c8..364cf46 100644
--- a/test-files/src/lib.rs
+++ b/test-files/src/lib.rs
@@ -26,15 +26,24 @@ mod tests {
     }
 }
 
-// Ideally this would be a constant, but I don't want to have to make os_version dyn
-pub fn test_version() -> titokens::Version {
-    titokens::Version {
-        model: titokens::Model::TI84PCE,
+pub use titokens::{Version, Tokenizer, Model};
+
+#[macro_export]
+macro_rules! test_version {
+    () => {$crate::Version {
+        model: $crate::Model::TI84PCE,
         os_version: "5.3.0".to_string(),
-    }
+    }};
+}
+
+#[macro_export]
+macro_rules! test_tokenizer {
+    () => {$crate::Tokenizer::new($crate::test_version!(), "en")};
 }
 
+
+
 pub fn load_test_data(file: &str) -> titokens::Tokens {
-    let tokenizer = titokens::Tokenizer::new(test_version(), "en");
+    let tokenizer = test_tokenizer!();
     let (tokens, _boundaries) = tokenizer
         .tokenize(&std::fs::read_to_string(env!("TESTS_PATH").to_owned() + file).unwrap())
diff --git a/ti-basic-optimizer/src/analyze/labels.rs b/ti-basic-optimizer/src/analyze/labels.rs
index 2127ace..168870f 100644
--- a/ti-basic-optimizer/src/analyze/labels.rs
+++ b/ti-basic-optimizer/src/analyze/labels.rs
@@ -51,12 +51,11 @@ impl Program {
 mod tests {
     use super::*;
     use crate::label_name;
-    use test_files::{load_test_data, test_version};
-    use titokens::Tokenizer;
+    use test_files::{load_test_data, test_tokenizer};
 
     fn program() -> Program {
         let mut tokens = load_test_data("/snippets/analysis/labels.txt");
-        let tokenizer = Tokenizer::new(test_version(), "en");
+        let tokenizer = test_tokenizer!();
 
         Program::from_tokens(&mut tokens, &tokenizer)
     }
diff --git a/ti-basic-optimizer/src/error_reporting.rs b/ti-basic-optimizer/src/error_reporting.rs
index 0db51f8..0840ed7 100644
--- a/ti-basic-optimizer/src/error_reporting.rs
+++ b/ti-basic-optimizer/src/error_reporting.rs
@@ -104,7 +104,7 @@ enum LabelKind {
 }
 
 impl LabelKind {
-    pub fn string_indices(&self, token_boundaries: &TokenBoundaries) -> Range<usize> {
+    fn string_indices(&self, token_boundaries: &TokenBoundaries) -> Range<usize> {
         match self {
             LabelKind::Single(tok_idx) => token_boundaries.single(*tok_idx),
             LabelKind::Span(tok_range) => token_boundaries.range(tok_range.clone()),
@@ -112,6 +112,9 @@ impl LabelKind {
     }
 }
 
+/// `TokenReport` is used to report errors at the token level.
+///
+/// Token indices are usually obtained by calling [`Tokens::current_position`](titokens::Tokens::current_position).
 #[derive(Debug, Clone)]
 pub struct TokenReport {
     location: usize,
@@ -123,6 +126,9 @@ pub struct TokenReport {
 }
 
 impl TokenReport {
+    /// New error at the provided token index.
+    ///
+    /// Token indices are usually obtained by calling [`Tokens::current_position`](titokens::Tokens::current_position).
     #[must_use]
     pub fn new(location: usize, message: &str, suggestion: Option<&str>) -> Self {
         TokenReport {
@@ -135,14 +141,20 @@ impl TokenReport {
         }
     }
 
+    /// Add a label at the provided range of token indices.
+    ///
+    /// Token indices are usually obtained by calling [`Tokens::current_position`](titokens::Tokens::current_position).
     #[must_use]
-    pub fn with_span_label(mut self, location: std::ops::Range<usize>, message: &str) -> Self {
+    pub fn with_span_label(mut self, location: Range<usize>, message: &str) -> Self {
         self.labels
             .push((LabelKind::Span(location), message.to_string()));
 
         self
     }
 
+    /// Add a label at the provided token index.
+    ///
+    /// Token indices are usually obtained by calling [`Tokens::current_position`](titokens::Tokens::current_position).
     #[must_use]
     pub fn with_label(mut self, location: usize, message: &str) -> Self {
         self.labels
@@ -151,6 +163,7 @@ impl TokenReport {
         self
     }
 
+    /// Provide an error code for this error.
     #[must_use]
     pub fn with_code(mut self, error_code: usize) -> Self {
         self.code = Some(error_code);
@@ -158,6 +171,10 @@ impl TokenReport {
         self
     }
 
+    /// Format and print this error to stderr, using the provided [`TokenBoundaries`] to translate
+    /// the tokens.
+    ///
+    /// [`ariadne`] seems to choke on Unicode input; tokenize without Unicode.
     pub fn error(self, boundaries: TokenBoundaries) {
         let mut builder = ariadne::Report::build(
             ariadne::ReportKind::Error,
diff --git a/ti-basic-optimizer/src/optimize/expressions/parenthesis_optimization.rs b/ti-basic-optimizer/src/optimize/expressions/parenthesis_optimization.rs
index 2308bcb..d24e21f 100644
--- a/ti-basic-optimizer/src/optimize/expressions/parenthesis_optimization.rs
+++ b/ti-basic-optimizer/src/optimize/expressions/parenthesis_optimization.rs
@@ -221,7 +221,7 @@ mod tests {
         let savings = expr.optimize_parentheses();
         assert_eq!(expected_savings, savings);
 
-        let reconstructed = expr.reconstruct(&test_version().into());
+        let reconstructed = expr.reconstruct(&test_version!().into());
         let mut optimized = reconstructed.clone();
         Expression::strip_closing_parenthesis(&mut optimized);
diff --git a/ti-basic-optimizer/src/optimize/strategies/numeric_literal/write_digits.rs b/ti-basic-optimizer/src/optimize/strategies/numeric_literal/write_digits.rs
index 0076d86..7ff5cf4 100644
--- a/ti-basic-optimizer/src/optimize/strategies/numeric_literal/write_digits.rs
+++ b/ti-basic-optimizer/src/optimize/strategies/numeric_literal/write_digits.rs
@@ -158,7 +158,6 @@ impl Reconstruct for WriteDigits {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use test_files::test_version;
     use tifloats::tifloat;
 
     #[test]
diff --git a/ti-basic-optimizer/src/parse/commands/control_flow/menu.rs b/ti-basic-optimizer/src/parse/commands/control_flow/menu.rs
index 6fa7de8..f258c13 100644
--- a/ti-basic-optimizer/src/parse/commands/control_flow/menu.rs
+++ b/ti-basic-optimizer/src/parse/commands/control_flow/menu.rs
@@ -127,7 +127,7 @@ mod tests {
             .unwrap();
 
         assert_eq!(
-            menu.reconstruct(&test_version().into()),
+            menu.reconstruct(&test_version!().into()),
             data.collect::<Vec<_>>()
         );
     }
diff --git a/ti-basic-optimizer/src/parse/components/list_name.rs b/ti-basic-optimizer/src/parse/components/list_name.rs
index fee6c82..9f43d63 100644
--- a/ti-basic-optimizer/src/parse/components/list_name.rs
+++ b/ti-basic-optimizer/src/parse/components/list_name.rs
@@ -139,6 +139,6 @@ mod tests {
         let parsed = ListName::parse(tokens.next().unwrap(), &mut tokens)
             .unwrap()
             .unwrap();
-        assert_eq!(parsed.reconstruct(&test_version().into()), name);
+        assert_eq!(parsed.reconstruct(&test_version!().into()), name);
     }
 }
diff --git a/ti-basic-optimizer/src/parse/expression.rs b/ti-basic-optimizer/src/parse/expression.rs
index 671cc19..8788043 100644
--- a/ti-basic-optimizer/src/parse/expression.rs
+++ b/ti-basic-optimizer/src/parse/expression.rs
@@ -313,7 +313,7 @@ mod tests {
         };
 
         ($name: ident, $path: expr) => {
-            test_case!($name, $path, test_files::test_version());
+            test_case!($name, $path, test_files::test_version!());
         };
     }
 
diff --git a/ti-basic-optimizer/src/parse/program.rs b/ti-basic-optimizer/src/parse/program.rs
index de25651..dafe8f0 100644
--- a/ti-basic-optimizer/src/parse/program.rs
+++ b/ti-basic-optimizer/src/parse/program.rs
@@ -100,12 +100,12 @@ impl Program {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use test_files::{load_test_data, test_version};
+    use test_files::{load_test_data, test_tokenizer};
 
     #[test]
     fn parses_newlines_correctly_with_strings() {
         let mut tokens = load_test_data("/snippets/parsing/strings/newline-stuff.txt");
-        let mut program = Program::from_tokens(&mut tokens, &Tokenizer::new(test_version(), "en"));
+        let mut program = Program::from_tokens(&mut tokens, &test_tokenizer!());
 
         assert_eq!(program.lines.len(), 5);
     }
@@ -113,7 +113,7 @@ mod tests {
     #[test]
     fn skips_blank_lines() {
         let mut tokens = load_test_data("/snippets/parsing/ten-blank-lines.txt");
-        let mut program = Program::from_tokens(&mut tokens, &Tokenizer::new(test_version(), "en"));
+        let mut program = Program::from_tokens(&mut tokens, &test_tokenizer!());
 
         assert_eq!(program.lines.len(), 0);
     }
@@ -125,27 +125,29 @@ mod tests {
     /// 4. Export to file B
     /// 5. Then, check A == B
     mod round_trip {
+        use test_files::test_version;
         use super::*;
 
         macro_rules! round_trip {
             ($name: ident, $path: expr, $debug: expr) => {
                 #[test]
                 fn $name() {
                     let mut original = load_test_data($path);
-                    let tokenizer = Tokenizer::new(test_version(), "en");
+                    let tokenizer = test_tokenizer!();
+                    let config = test_version!().into();
                     let original_program = Program::from_tokens(&mut original, &tokenizer);
-                    let a = original_program.reconstruct(&test_version().into());
+                    let a = original_program.reconstruct(&config);
                     let a_program = Program::from_tokens(
-                        &mut Tokens::from_vec(a.clone(), Some(test_version())),
+                        &mut Tokens::from_vec(a.clone(), Some(test_version!())),
                         &tokenizer,
                     );
-                    let b = a_program.reconstruct(&test_version().into());
+                    let b = a_program.reconstruct(&config);
 
                     if $debug {
                         dbg!(
-                            Tokens::from_vec(a.clone(), Some(test_version())).to_string(&tokenizer)
+                            Tokens::from_vec(a.clone(), Some(test_version!())).to_string(&tokenizer)
                         );
                         dbg!(
-                            Tokens::from_vec(b.clone(), Some(test_version())).to_string(&tokenizer)
+                            Tokens::from_vec(b.clone(), Some(test_version!())).to_string(&tokenizer)
                        );
                    }
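
For reference, a minimal sketch of how a dependent crate is expected to consume the new macros. The test body is illustrative only: the function name and assertions are not part of the real test suite, and the only paths reused are ones that already appear in this patch.

// Illustrative consumer test for the exported macros; not part of this patch.
use test_files::{load_test_data, test_tokenizer, test_version};

#[test]
fn fixture_macros_work_together() {
    // test_version!() expands to a Version for the TI-84 Plus CE on OS 5.3.0.
    let version = test_version!();
    assert_eq!(version.os_version, "5.3.0");

    // test_tokenizer!() expands to Tokenizer::new(test_version!(), "en"),
    // so tests no longer need their own `use titokens::Tokenizer;`.
    let tokenizer = test_tokenizer!();

    // Snippet path taken from the tests touched in this diff.
    let mut tokens = load_test_data("/snippets/parsing/ten-blank-lines.txt");
    let _ = (&tokenizer, tokens.next());
}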