tests: macro-ify test_version and test_tokenizer
This commit also provides some improved documentation for the error reporting stuff.
Note that `test_files` is still basically a giant pile of technical debt.
rpitasky committed Sep 8, 2024
1 parent 49d75a4 commit 3cd47b4
Showing 9 changed files with 51 additions and 25 deletions.
21 changes: 15 additions & 6 deletions test-files/src/lib.rs
@@ -26,15 +26,24 @@ mod tests {
     }
 }
 
-// Ideally this would be a constant, but I don't want to have to make os_version dyn
-pub fn test_version() -> titokens::Version {
-    titokens::Version {
-        model: titokens::Model::TI84PCE,
+pub use titokens::{Version, Tokenizer, Model};
+
+#[macro_export]
+macro_rules! test_version {
+    () => {$crate::Version {
+        model: $crate::Model::TI84PCE,
         os_version: "5.3.0".to_string(),
-    }
+    }};
 }
 
+#[macro_export]
+macro_rules! test_tokenizer {
+    () => {$crate::Tokenizer::new($crate::test_version!(), "en")};
+}
+
+
 pub fn load_test_data(file: &str) -> titokens::Tokens {
-    let tokenizer = titokens::Tokenizer::new(test_version(), "en");
+    let tokenizer = test_tokenizer!();
 
     let (tokens, _boundaries) = tokenizer
         .tokenize(&std::fs::read_to_string(env!("TESTS_PATH").to_owned() + file).unwrap())
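A short, hedged sketch of how a downstream test can now use these macros (the test name and assertion are invented for illustration):

```rust
use test_files::{test_tokenizer, test_version};

#[test]
fn builds_test_fixtures() {
    // test_version!() expands to a fresh Version struct literal...
    let version = test_version!();
    assert_eq!(version.os_version, "5.3.0");

    // ...and test_tokenizer!() to Tokenizer::new(test_version!(), "en").
    let _tokenizer = test_tokenizer!();
}
```

The `pub use titokens::{Version, Tokenizer, Model}` re-export is what makes the `#[macro_export]` macros usable from other crates: `$crate::Version` resolves against `test_files` itself, so callers don't need their own `titokens` import at the expansion site.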
5 changes: 2 additions & 3 deletions ti-basic-optimizer/src/analyze/labels.rs
@@ -51,12 +51,11 @@ impl Program {
 mod tests {
     use super::*;
     use crate::label_name;
-    use test_files::{load_test_data, test_version};
-    use titokens::Tokenizer;
+    use test_files::{load_test_data, test_tokenizer};
 
     fn program() -> Program {
         let mut tokens = load_test_data("/snippets/analysis/labels.txt");
-        let tokenizer = Tokenizer::new(test_version(), "en");
+        let tokenizer = test_tokenizer!();
 
         Program::from_tokens(&mut tokens, &tokenizer)
     }
21 changes: 19 additions & 2 deletions ti-basic-optimizer/src/error_reporting.rs
@@ -104,14 +104,17 @@ enum LabelKind {
 }
 
 impl LabelKind {
-    pub fn string_indices(&self, token_boundaries: &TokenBoundaries) -> Range<usize> {
+    fn string_indices(&self, token_boundaries: &TokenBoundaries) -> Range<usize> {
         match self {
             LabelKind::Single(tok_idx) => token_boundaries.single(*tok_idx),
             LabelKind::Span(tok_range) => token_boundaries.range(tok_range.clone()),
         }
     }
 }
 
+/// `TokenReport` is used to report errors at the token level.
+///
+/// Token indices are usually obtained by calling [`Tokens::current_position`](titokens::Tokens::current_position).
 #[derive(Debug, Clone)]
 pub struct TokenReport {
     location: usize,
@@ -123,6 +126,9 @@ pub struct TokenReport {
 }
 
 impl TokenReport {
+    /// New error at the provided token index.
+    ///
+    /// Token indices are usually obtained by calling [`Tokens::current_position`](titokens::Tokens::current_position).
     #[must_use]
     pub fn new(location: usize, message: &str, suggestion: Option<&str>) -> Self {
         TokenReport {
@@ -135,14 +141,20 @@ impl TokenReport {
         }
     }
 
+    /// Add a label at the provided range of token indices.
+    ///
+    /// Token indices are usually obtained by calling [`Tokens::current_position`](titokens::Tokens::current_position).
     #[must_use]
-    pub fn with_span_label(mut self, location: std::ops::Range<usize>, message: &str) -> Self {
+    pub fn with_span_label(mut self, location: Range<usize>, message: &str) -> Self {
         self.labels
             .push((LabelKind::Span(location), message.to_string()));
 
         self
     }
 
+    /// Add a label at the provided token index.
+    ///
+    /// Token indices are usually obtained by calling [`Tokens::current_position`](titokens::Tokens::current_position).
     #[must_use]
     pub fn with_label(mut self, location: usize, message: &str) -> Self {
         self.labels
@@ -151,13 +163,18 @@ impl TokenReport {
         self
     }
 
+    /// Provide an error code for this error.
     #[must_use]
     pub fn with_code(mut self, error_code: usize) -> Self {
         self.code = Some(error_code);
 
         self
     }
 
+    /// Format and print this error to stderr, using the provided [`TokenBoundaries`] to translate
+    /// the tokens.
+    ///
+    /// [`ariadne`] seems to choke on Unicode input; tokenize without Unicode.
     pub fn error(self, boundaries: TokenBoundaries) {
         let mut builder = ariadne::Report::build(
             ariadne::ReportKind::Error,
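Taken together, the new doc comments describe a small builder API. A hedged sketch of how a caller might raise one of these reports (the indices, messages, and error code below are invented for illustration; `boundaries` stands for the `TokenBoundaries` produced during tokenization):

```rust
// Sketch only: the values are hypothetical, but every method shown here
// appears in the diff above.
let report = TokenReport::new(12, "unexpected token", Some("try deleting it"))
    // point at the offending token itself...
    .with_label(12, "this token is not valid here")
    // ...and at the wider span it was found in
    .with_span_label(8..12, "while parsing this expression")
    .with_code(42);

// Formats the report with ariadne and prints it to stderr; per the note
// above, tokenize without Unicode to keep ariadne happy.
report.error(boundaries);
```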
@@ -221,7 +221,7 @@ mod tests {
         let savings = expr.optimize_parentheses();
         assert_eq!(expected_savings, savings);
 
-        let reconstructed = expr.reconstruct(&test_version().into());
+        let reconstructed = expr.reconstruct(&test_version!().into());
         let mut optimized = reconstructed.clone();
         Expression::strip_closing_parenthesis(&mut optimized);
 
@@ -158,7 +158,6 @@ impl Reconstruct for WriteDigits {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use test_files::test_version;
     use tifloats::tifloat;
 
     #[test]
2 changes: 1 addition & 1 deletion ti-basic-optimizer/src/parse/commands/control_flow/menu.rs
@@ -127,7 +127,7 @@ mod tests {
             .unwrap();
 
         assert_eq!(
-            menu.reconstruct(&test_version().into()),
+            menu.reconstruct(&test_version!().into()),
             data.collect::<Vec<_>>()
         );
     }
2 changes: 1 addition & 1 deletion ti-basic-optimizer/src/parse/components/list_name.rs
@@ -139,6 +139,6 @@ mod tests {
         let parsed = ListName::parse(tokens.next().unwrap(), &mut tokens)
             .unwrap()
             .unwrap();
-        assert_eq!(parsed.reconstruct(&test_version().into()), name);
+        assert_eq!(parsed.reconstruct(&test_version!().into()), name);
     }
 }
2 changes: 1 addition & 1 deletion ti-basic-optimizer/src/parse/expression.rs
@@ -313,7 +313,7 @@ mod tests {
     };
 
     ($name: ident, $path: expr) => {
-        test_case!($name, $path, test_files::test_version());
+        test_case!($name, $path, test_files::test_version!());
    };
 }
 
20 changes: 11 additions & 9 deletions ti-basic-optimizer/src/parse/program.rs
@@ -100,20 +100,20 @@ impl Program {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use test_files::{load_test_data, test_version};
+    use test_files::{load_test_data, test_tokenizer};
 
     #[test]
     fn parses_newlines_correctly_with_strings() {
         let mut tokens = load_test_data("/snippets/parsing/strings/newline-stuff.txt");
-        let mut program = Program::from_tokens(&mut tokens, &Tokenizer::new(test_version(), "en"));
+        let mut program = Program::from_tokens(&mut tokens, &test_tokenizer!());
 
         assert_eq!(program.lines.len(), 5);
     }
 
     #[test]
     fn skips_blank_lines() {
         let mut tokens = load_test_data("/snippets/parsing/ten-blank-lines.txt");
-        let mut program = Program::from_tokens(&mut tokens, &Tokenizer::new(test_version(), "en"));
+        let mut program = Program::from_tokens(&mut tokens, &test_tokenizer!());
 
         assert_eq!(program.lines.len(), 0);
     }
@@ -125,27 +125,29 @@ mod tests {
     /// 4. Export to file B
     /// 5. Then, check A == B
     mod round_trip {
+        use test_files::test_version;
         use super::*;
         macro_rules! round_trip {
            ($name: ident, $path: expr, $debug: expr) => {
                #[test]
                fn $name() {
                    let mut original = load_test_data($path);
-                   let tokenizer = Tokenizer::new(test_version(), "en");
+                   let tokenizer = test_tokenizer!();
+                   let config = test_version!().into();
                    let original_program = Program::from_tokens(&mut original, &tokenizer);
-                   let a = original_program.reconstruct(&test_version().into());
+                   let a = original_program.reconstruct(&config);
                    let a_program = Program::from_tokens(
-                       &mut Tokens::from_vec(a.clone(), Some(test_version())),
+                       &mut Tokens::from_vec(a.clone(), Some(test_version!())),
                        &tokenizer,
                    );
-                   let b = a_program.reconstruct(&test_version().into());
+                   let b = a_program.reconstruct(&config);
 
                    if $debug {
                        dbg!(
-                           Tokens::from_vec(a.clone(), Some(test_version())).to_string(&tokenizer)
+                           Tokens::from_vec(a.clone(), Some(test_version!())).to_string(&tokenizer)
                        );
                        dbg!(
-                           Tokens::from_vec(b.clone(), Some(test_version())).to_string(&tokenizer)
+                           Tokens::from_vec(b.clone(), Some(test_version!())).to_string(&tokenizer)
                        );
                    }
 
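For completeness, a hypothetical invocation of the `round_trip!` macro above (the test name and snippet path are invented):

```rust
// Expands to a #[test] fn that tokenizes the snippet, reconstructs it,
// re-parses the reconstruction, and — per the mod doc above — checks
// that the two exports match.
round_trip!(example_round_trip, "/snippets/parsing/example.txt", false);
```

Hoisting `let config = test_version!().into();` out of the repeated `reconstruct` calls follows naturally from the macro-ification: each `test_version!()` expansion builds a fresh `Version` struct literal, so doing the conversion once per test avoids redundant work.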
