Fix comments

dkanus 2025-09-16 08:13:04 +07:00
parent d519ecab2e
commit d9923fd762
3 changed files with 8 additions and 8 deletions


@@ -75,7 +75,7 @@ fn main() {
         let path = entry.path();
         match fs::read(path) {
             Ok(raw_bytes) => {
-                // Autodetect encoding for old Unreal script sources
+                // Auto-detect encoding for old Unreal script sources
                 let (encoding_label, _, _) = chardet::detect(&raw_bytes);
                 let encoding = encoding_rs::Encoding::for_label(encoding_label.as_bytes())
                     .unwrap_or(encoding_rs::UTF_8);
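For context, the detect-then-decode step around this hunk works roughly as follows. This is a minimal sketch using the `chardet` and `encoding_rs` crates the hunk already calls into; the helper `read_uc_source` is hypothetical, not part of the commit:

```rust
use std::fs;
use std::path::Path;

/// Sketch of the detect-then-decode step shown above (hypothetical helper).
fn read_uc_source(path: &Path) -> std::io::Result<String> {
    let raw_bytes = fs::read(path)?;
    // chardet guesses a label such as "windows-1252"; fall back to UTF-8
    // when encoding_rs does not recognize the label.
    let (encoding_label, _confidence, _language) = chardet::detect(&raw_bytes);
    let encoding = encoding_rs::Encoding::for_label(encoding_label.as_bytes())
        .unwrap_or(encoding_rs::UTF_8);
    // `decode` cannot fail: malformed sequences become U+FFFD replacements.
    let (text, _encoding_used, _had_errors) = encoding.decode(&raw_bytes);
    Ok(text.into_owned())
}
```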
@@ -109,7 +109,7 @@ fn main() {
         elapsed_time
     );
-    // Roundtrip check
+    // Round-trip check
     for ((path, original), (_, tokenized_file)) in uc_files.iter().zip(tokenized_files.iter()) {
         let reconstructed = tokenized_file.reconstruct_source();
         if original != &reconstructed {
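The loop above enforces a lossless-lexing invariant: concatenating a file's token pieces must reproduce the original source byte-for-byte. A toy illustration of the idea; the token pieces here are hypothetical, not the crate's actual output:

```rust
/// Toy round-trip check: a lossless lexer keeps every byte of the input,
/// including whitespace and comments, so re-joining the pieces is identity.
fn reconstruct(pieces: &[&str]) -> String {
    pieces.concat()
}

fn main() {
    let source = "var int health; // hit points";
    // Hypothetical pieces a lossless lexer might emit for `source`.
    let pieces = ["var", " ", "int", " ", "health", ";", " ", "// hit points"];
    assert_eq!(reconstruct(&pieces), source);
}
```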


@@ -47,7 +47,7 @@ impl<'src> DebugTools for super::TokenizedFile<'src> {
             // the remainder of a multi-line token that started earlier.
             (Some(origin_row), None) => {
                 println!(
-                    "\t[Continued from line {} no new tokens here]",
+                    "\t[Continued from line {} - no new tokens here]",
                     origin_row + 1
                 );
             }
@@ -75,7 +75,7 @@ fn dump_spans<'a>(spans: &[super::TokenPiece<'a>]) {
         let start = col_utf16;
         let end = start + span.length_utf16;
         println!(
-            "\t\t{:?} @ {}{}: {:?}",
+            "\t\t{:?} @ {}-{}: {:?}",
             span.token, start, end, span.lexeme
         );
         col_utf16 = end;
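The `start-end` ranges printed here are measured in UTF-16 code units, which is presumably why each span stores `length_utf16`: editor protocols such as LSP address columns in UTF-16 code units by default, and a character outside the Basic Multilingual Plane occupies two of them. A quick illustration:

```rust
/// UTF-16 length of a lexeme, as a span's `length_utf16` would record it.
fn utf16_len(lexeme: &str) -> usize {
    lexeme.encode_utf16().count()
}

fn main() {
    assert_eq!(utf16_len("name"), 4);  // ASCII: one unit per character
    assert_eq!(utf16_len("héllo"), 5); // é still fits in a single unit
    assert_eq!(utf16_len("𝛼β"), 3);    // 𝛼 needs a surrogate pair: 2 + 1
}
```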


@@ -2,15 +2,15 @@
 //!
 //! ## Notable details
 //!
-//! Lexer for UnrealScript that recognizes inline `cpptext { }` blocks.
+//! Lexer for UnrealScript that recognizes inline `cpptext { ... }` blocks.
 //!
 //! In UnrealScript, `cpptext` lets authors embed raw C++ between braces.
 //! Because whitespace, newlines, or comments may appear between the
 //! `cpptext` keyword and the opening `{`, the lexer must remember that
 //! it has just seen `cpptext` - hence a state machine.
 //!
-//! Modes
-//! ------
+//! ## Modes
 //!
 //! - **Normal** - ordinary UnrealScript tokens.
 //! - **AwaitingCppBlock** - after `cpptext`, waiting for the next `{`.
 //!
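The two modes described in this doc comment boil down to a small transition function. A minimal sketch of that state machine; the names `Tok`, `Mode`, and `next_mode` are illustrative, not the crate's actual API:

```rust
#[derive(Clone, Copy)]
enum Tok {
    CppText,    // the `cpptext` keyword
    OpenBrace,  // `{`
    Whitespace, // spaces and newlines
    Comment,    // `//` or `/* ... */`
    Other,      // any other UnrealScript token
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum Mode {
    Normal,
    AwaitingCppBlock,
}

fn next_mode(mode: Mode, tok: Tok) -> Mode {
    match (mode, tok) {
        // Seeing `cpptext` arms the state machine.
        (Mode::Normal, Tok::CppText) => Mode::AwaitingCppBlock,
        // Trivia between `cpptext` and `{` must not disarm it.
        (Mode::AwaitingCppBlock, Tok::Whitespace | Tok::Comment) => Mode::AwaitingCppBlock,
        // The `{` opens the raw C++ block; once consumed, back to Normal.
        (Mode::AwaitingCppBlock, Tok::OpenBrace) => Mode::Normal,
        _ => Mode::Normal,
    }
}

fn main() {
    let armed = next_mode(Mode::Normal, Tok::CppText);
    assert_eq!(next_mode(armed, Tok::Comment), Mode::AwaitingCppBlock);
    assert_eq!(next_mode(armed, Tok::OpenBrace), Mode::Normal);
}
```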
@@ -476,7 +476,7 @@ fn consume_cpp_block(lexer: &mut Lexer<Token>) {
     }
 }
-/// Consume over a C-style `/* */` comment (without nesting).
+/// Consume over a C-style `/* ... */` comment (without nesting).
 ///
 /// Assumes that opener `/*` is already consumed.
 fn consume_c_comment(lexer: &mut Lexer<Token>) {
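Because the comment is non-nesting, the consumer only needs to find the first `*/` after the already-consumed opener; an inner `/*` changes nothing. A standalone sketch of that length calculation (`c_comment_len` is illustrative, not the crate's helper):

```rust
/// Byte length of a non-nesting C comment, given the text that follows
/// an already-consumed `/*` (the closing `*/` is included in the count).
fn c_comment_len(rest: &str) -> usize {
    match rest.find("*/") {
        Some(end) => end + 2, // first `*/` always closes: no nesting
        None => rest.len(),   // unterminated: consume to end of input
    }
}

fn main() {
    // The inner `/*` is ignored; the first `*/` ends the comment.
    assert_eq!(c_comment_len(" a /* b */ c"), 10);
    assert_eq!(c_comment_len("never closed"), 12);
}
```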