Huge dump of refactored code. Still in the middle of the changes that are to be squashed later in a one huge monster commit, because there is no value in anything atomic here.
136 lines
4.3 KiB
Rust
136 lines
4.3 KiB
Rust
use std::{fs, path::PathBuf};
|
|
|
|
use rottlib::lexer::{Keyword, Token, TokenizedFile};
|
|
|
|
/// Returns the path to a fixture file in `tests/fixtures/`.
|
|
fn fixture_file_path(name: &str) -> PathBuf {
|
|
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
|
.join("tests")
|
|
.join("fixtures")
|
|
.join(name)
|
|
}
|
|
|
|
/// Loads a fixture source file as UTF-8 text.
|
|
fn read_fixture_source(name: &str) -> String {
|
|
fs::read_to_string(fixture_file_path(name))
|
|
.unwrap_or_else(|e| panic!("failed to read fixture {name}: {e}"))
|
|
}
|
|
|
|
/// Returns the token at the given token index on a physical line.
|
|
///
|
|
/// Here `line` is 1-based, to match human line numbers in fixture files.
|
|
/// `token_index` is 0-based within `TokenizedFile::line_tokens`.
|
|
fn token_on_line(file: &TokenizedFile<'_>, line: usize, token_index: usize) -> Option<Token> {
|
|
file.line_tokens(line - 1)
|
|
.nth(token_index)
|
|
.map(|(_, token_data)| token_data.token)
|
|
}
|
|
|
|
/// Returns reconstructed visible text for a physical line.
|
|
///
|
|
/// Here `line` is 1-based, to match human line numbers in fixture files.
|
|
fn line_text(file: &TokenizedFile<'_>, line: usize) -> Option<String> {
|
|
file.line_text(line - 1)
|
|
}
|
|
|
|
#[test]
|
|
fn command_api_fixture_queries() {
|
|
let source = read_fixture_source("CommandAPI.uc");
|
|
let file = TokenizedFile::tokenize(&source);
|
|
assert_eq!(file.line_count(), 1578);
|
|
|
|
assert_eq!(
|
|
line_text(&file, 704).as_deref(),
|
|
Some(
|
|
"public final function CommandConfigInfo ResolveCommandForUserID(BaseText itemName, UserID id) {"
|
|
)
|
|
);
|
|
assert_eq!(
|
|
line_text(&file, 806).as_deref(),
|
|
Some(" _.memory.Free(wrapper);")
|
|
);
|
|
assert_eq!(
|
|
line_text(&file, 1274).as_deref(),
|
|
Some("/// Method must be called after [`Voting`] with a given name is added.")
|
|
);
|
|
assert_eq!(
|
|
line_text(&file, 14).as_deref(),
|
|
Some(" * Acedia is distributed in the hope that it will be useful,")
|
|
);
|
|
|
|
let token = token_on_line(&file, 22, 0).unwrap();
|
|
assert_eq!(token, Token::Keyword(Keyword::Class));
|
|
|
|
let token = token_on_line(&file, 1577, 0).unwrap();
|
|
assert_eq!(token, Token::Keyword(Keyword::DefaultProperties));
|
|
|
|
let token = token_on_line(&file, 649, 4).unwrap();
|
|
assert_eq!(token, Token::Whitespace);
|
|
}
|
|
|
|
#[test]
|
|
fn dbrecord_fixture_queries() {
|
|
let source = read_fixture_source("DBRecord.uc");
|
|
let file = TokenizedFile::tokenize(&source);
|
|
assert_eq!(file.line_count(), 1199);
|
|
|
|
assert_eq!(
|
|
line_text(&file, 149).as_deref(),
|
|
Some(" * However, JSON pointers are not convenient or efficient enough for that,")
|
|
);
|
|
assert_eq!(
|
|
line_text(&file, 787).as_deref(),
|
|
Some(" * 3. 'number' -> either `IntBox` or `FloatBox`, depending on")
|
|
);
|
|
assert_eq!(
|
|
line_text(&file, 1023).as_deref(),
|
|
Some(" bool makeMutable)")
|
|
);
|
|
assert_eq!(
|
|
line_text(&file, 29).as_deref(),
|
|
Some(" config(AcediaDB);")
|
|
);
|
|
|
|
let token = token_on_line(&file, 565, 0).unwrap();
|
|
assert_eq!(token, Token::BlockComment);
|
|
|
|
let token = token_on_line(&file, 467, 10).unwrap();
|
|
assert_eq!(token, Token::Identifier);
|
|
|
|
let token = token_on_line(&file, 467, 9).unwrap();
|
|
assert_eq!(token, Token::LeftParenthesis);
|
|
}
|
|
|
|
#[test]
|
|
fn kvehicle_fixture_queries() {
|
|
let source = read_fixture_source("KVehicle.uc");
|
|
let file = TokenizedFile::tokenize(&source);
|
|
assert_eq!(file.line_count(), 326);
|
|
|
|
assert_eq!(
|
|
line_text(&file, 12).as_deref(),
|
|
Some(" virtual void setPhysics(BYTE NewPhysics, AActor *NewFloor, FVector NewFloorV);")
|
|
);
|
|
assert_eq!(
|
|
line_text(&file, 127).as_deref(),
|
|
Some(" pc.myHUD.bCrosshairShow = pc.myHUD.default.bCrosshairShow;")
|
|
);
|
|
assert_eq!(
|
|
line_text(&file, 264).as_deref(),
|
|
Some(" //////////////////////////////////////////////////////")
|
|
);
|
|
assert_eq!(
|
|
line_text(&file, 299).as_deref(),
|
|
Some(" ExitPositions(0)=(X=0,Y=0,Z=0)")
|
|
);
|
|
|
|
let token = token_on_line(&file, 17, 0).unwrap();
|
|
assert_eq!(token, Token::Newline);
|
|
|
|
let token = token_on_line(&file, 20, 7).unwrap();
|
|
assert_eq!(token, Token::Less);
|
|
|
|
let token = token_on_line(&file, 246, 2).unwrap();
|
|
assert_eq!(token, Token::Increment);
|
|
}
|