crates/vim9-lexer/src/lib.rs: 37 changes (18 additions, 19 deletions)
@@ -3,7 +3,7 @@
 use std::{
     cell::{Cell, RefCell},
     collections::VecDeque,
-    fmt::{Debug, Display},
+    fmt::{Debug, Display, Write},
 };
 
 use anyhow::{Context, Result};
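
A short aside on why `Write` joins the import list (the `Abc` type below is a made-up example, not from the crate): `Formatter::write_char`, used in the new `Display` impl, and `writeln!` into a `String`, used in `snapshot_lexing`, both resolve through methods of the `std::fmt::Write` trait, which therefore has to be in scope.

```rust
use std::fmt::{self, Write};

// Made-up example type: `write_char` is a method of the `fmt::Write`
// trait, so the trait must be imported for `Formatter` to expose it.
struct Abc;

impl fmt::Display for Abc {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for c in ['a', 'b', 'c'] {
            f.write_char(c)?;
        }
        Ok(())
    }
}

fn main() {
    assert_eq!(Abc.to_string(), "abc");
}
```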
@@ -79,15 +79,16 @@ impl Debug for TokenText<'_> {
 
 impl Display for TokenText<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(
-            f,
-            "{}",
-            match self {
-                TokenText::Slice(s) => s.iter().collect::<String>(),
-                TokenText::Owned(s) => s.clone(),
-                TokenText::Empty => "".to_string(),
-            }
-        )
+        match self {
+            TokenText::Slice(s) => {
+                for c in *s {
+                    f.write_char(*c)?;
+                }
+                Ok(())
+            }
+            TokenText::Owned(s) => write!(f, "{s}"),
+            TokenText::Empty => Ok(()),
+        }
     }
 }
 
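If it helps to see the whole pattern compile in isolation, here is a self-contained sketch; `Text` is a simplified stand-in for `TokenText`, not the crate's actual definition. The substance of the change: the old `fmt` body built a temporary `String` (via `collect`, `clone`, or `to_string`) on every call, while the new body streams straight into the formatter.

```rust
use std::fmt::{self, Display, Write};

// Simplified stand-in for the crate's `TokenText`, for illustration only.
enum Text<'a> {
    Slice(&'a [char]),
    Owned(String),
    Empty,
}

impl Display for Text<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Stream each char straight to the formatter instead of
            // first collecting into a temporary String.
            Text::Slice(s) => {
                for c in *s {
                    f.write_char(*c)?;
                }
                Ok(())
            }
            Text::Owned(s) => write!(f, "{s}"),
            Text::Empty => Ok(()),
        }
    }
}

fn main() {
    let chars: Vec<char> = "let".chars().collect();
    assert_eq!(Text::Slice(&chars).to_string(), "let");
    assert_eq!(Text::Owned("foo".into()).to_string(), "foo");
    assert_eq!(Text::Empty.to_string(), "");
}
```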
@@ -102,10 +103,7 @@ impl<'a> TokenText<'a> {
 
     pub fn equals(&self, val: &str) -> bool {
         match self {
-            TokenText::Slice(s) => {
-                // This seems like I shouldn't have to do this... oh well
-                val.chars().collect::<Vec<char>>() == *s
-            }
+            TokenText::Slice(s) => s.iter().copied().eq(val.chars()),
             TokenText::Owned(s) => s.as_str() == val,
             TokenText::Empty => false,
         }
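
The rewritten `equals` arm drops the workaround the old comment apologized for: instead of collecting `val` into a `Vec<char>` just to run a `Vec == Vec` comparison, it compares the two character sequences directly. A quick standalone check (values are arbitrary examples):

```rust
fn main() {
    let s: Vec<char> = "vim9script".chars().collect();
    let val = "vim9script";

    // Old approach: allocates a temporary Vec<char> from `val`
    // purely for the comparison.
    assert!(val.chars().collect::<Vec<char>>() == s);

    // New approach: walk both sequences in lockstep with no
    // allocation; `Iterator::eq` stops at the first mismatch.
    assert!(s.iter().copied().eq(val.chars()));
}
```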
@@ -1054,8 +1052,7 @@ pub fn snapshot_lexing(input: &str) -> String {
 
     let mut output = String::new();
     for (row, line) in input.lines().enumerate() {
-        output += line;
-        output += "\n";
+        let _ = writeln!(output, "{line}");
 
         while let Some(tok) = tokens.pop_front() {
             if tok.span.start_row != tok.span.end_row {
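
One note on the `let _ =` pattern introduced here: `writeln!` returns a `fmt::Result` because it works with any `fmt::Write` sink, but writing into a `String` never fails, so the value is always `Ok` and can be discarded. A minimal illustration (the input string is made up):

```rust
use std::fmt::Write;

fn main() {
    let mut output = String::new();
    // `writeln!` into a `String` goes through `fmt::Write::write_fmt`;
    // a String sink cannot error, so `let _ =` just drops an Ok(()).
    let _ = writeln!(output, "{line}", line = "vim9script");
    assert_eq!(output, "vim9script\n");
}
```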
@@ -1067,10 +1064,12 @@ pub fn snapshot_lexing(input: &str) -> String {
                 break;
             }
 
-            output += &" ".repeat(tok.span.start_col);
-            output += &"^".repeat(tok.span.end_col - tok.span.start_col);
-            output += &format!(" {tok:?}");
-            output += "\n"
+            let _ = writeln!(
+                output,
+                "{indent}{span} {tok:?}",
+                indent = " ".repeat(tok.span.start_col),
+                span = "^".repeat(tok.span.end_col - tok.span.start_col),
+            );
         }
     }
 
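To see what the named-argument `writeln!` renders, a small sketch with hypothetical span values (the real ones come from `tok.span`); it reproduces the caret underline the snapshot output draws beneath each token:

```rust
use std::fmt::Write;

fn main() {
    // Hypothetical values chosen just to show the alignment.
    let (start_col, end_col) = (4, 10);
    let tok = "Identifier"; // stand-in for the `{tok:?}` debug output

    let mut output = String::new();
    let _ = writeln!(
        output,
        "{indent}{span} {tok}",
        indent = " ".repeat(start_col),
        span = "^".repeat(end_col - start_col),
    );

    // Four spaces of indent, six carets under columns 4..10:
    assert_eq!(output, "    ^^^^^^ Identifier\n");
}
```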