rott/rottlib/tests/common.rs
dkanus 588790b9b4 Refactor everything
Huge dump of refactored code. Still in the middle of the changes that
are to be squashed later in a one huge monster commit, because there is
no value in anything atomic here.
2026-04-05 20:32:11 +07:00

64 lines
2.0 KiB
Rust

use std::path::{Path, PathBuf};
use rottlib::lexer::{Token, TokenData, TokenPosition, TokenizedFile};
pub fn fixture_path(name: &str) -> PathBuf {
Path::new(env!("CARGO_MANIFEST_DIR"))
.join("tests")
.join("fixtures")
.join(name)
}
/// Read the named fixture into a `String`.
///
/// # Panics
///
/// Panics with the fixture's full path if the file cannot be read —
/// acceptable in test helpers, where a missing fixture is a test bug.
pub fn read_fixture(name: &str) -> String {
    let path = fixture_path(name);
    match std::fs::read_to_string(&path) {
        Ok(contents) => contents,
        Err(e) => panic!("failed to read fixture {}: {e}", path.display()),
    }
}
/// Tokenize the named fixture and pass both the source text and the
/// resulting `TokenizedFile` to the supplied closure.
///
/// The HRTB (`for<'src>`) lets the closure borrow the source for exactly
/// as long as this call, since the `String` is owned by this frame.
pub fn with_fixture(name: &str, f: impl for<'src> FnOnce(&'src str, TokenizedFile<'src>)) {
    let source = read_fixture(name);
    let tokenized = TokenizedFile::tokenize(&source);
    f(source.as_str(), tokenized);
}
/// Lexemes of every token on `line`, in source order.
///
/// Only `'src` needs naming (it appears in the return type); the `'file`
/// lifetime of the reference itself is elided per the standard rules.
pub fn line_lexemes<'src>(file: &TokenizedFile<'src>, line: usize) -> Vec<&'src str> {
    file.line_tokens(line).map(|(_, t)| t.lexeme).collect()
}
/// Token kinds of every token on `line`, in source order.
///
/// `Token` is owned (no borrow of the source escapes), so no named
/// lifetime is required — the previous `<'src>` was needless.
pub fn line_tokens(file: &TokenizedFile<'_>, line: usize) -> Vec<Token> {
    file.line_tokens(line).map(|(_, t)| t.token).collect()
}
/// Positions of every token on `line`, in source order.
///
/// `TokenPosition` is owned, so the previously declared `<'src>` lifetime
/// was needless and is elided here.
pub fn line_positions(file: &TokenizedFile<'_>, line: usize) -> Vec<TokenPosition> {
    file.line_tokens(line).map(|(pos, _)| pos).collect()
}
/// `(token, lexeme)` pairs for every token on `line`, in source order.
///
/// `'src` must be named because the lexeme borrows the source text; the
/// needless `'file` lifetime has been removed in favor of elision.
pub fn line_pairs<'src>(file: &TokenizedFile<'src>, line: usize) -> Vec<(Token, &'src str)> {
    file.line_tokens(line)
        .map(|(_, t)| (t.token, t.lexeme))
        .collect()
}
/// Lexemes of every token in the file, in source order.
///
/// The needless `'file` lifetime has been removed; `'src` stays because
/// the returned lexemes borrow the tokenized source.
pub fn all_lexemes<'src>(file: &TokenizedFile<'src>) -> Vec<&'src str> {
    file.iter().map(|(_, t)| t.lexeme).collect()
}
/// Token kinds of every token in the file, in source order.
///
/// `Token` is owned, so the previously declared `<'src>` lifetime was
/// needless and is elided here.
pub fn all_tokens(file: &TokenizedFile<'_>) -> Vec<Token> {
    file.iter().map(|(_, t)| t.token).collect()
}
/// Token data at flat token index `index`, or `None` when out of range.
///
/// `'src` must stay explicit: with two input lifetimes (the reference and
/// the file's source), elision cannot pick one for `TokenData<'src>`.
pub fn token_at<'src>(file: &TokenizedFile<'src>, index: usize) -> Option<TokenData<'src>> {
    let position = TokenPosition(index);
    file.token_at(position)
}
/// Concatenate every lexeme back into a single `String` — presumably for
/// asserting that tokenization is lossless (round-trips the input);
/// confirm against the tests that call this.
///
/// The output is owned, so neither lifetime needs naming; both previously
/// declared lifetimes were needless.
pub fn reconstruct_source(file: &TokenizedFile<'_>) -> String {
    file.iter().map(|(_, t)| t.lexeme).collect()
}
/// Index of the first line whose full text equals `needle`, if any.
///
/// The previously declared `<'src>` lifetime was fully elidable and has
/// been removed (clippy `needless_lifetimes`).
pub fn find_line(file: &TokenizedFile<'_>, needle: &str) -> Option<usize> {
    (0..file.line_count()).find(|&line| file.line_text(line).as_deref() == Some(needle))
}