Skip to content

Commit

Permalink
Use a struct instead of a tuple for SymbolicStringStart tokens
Browse files Browse the repository at this point in the history
  • Loading branch information
vkleen committed Feb 1, 2023
1 parent 1bcc9b2 commit 8b6bcae
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 6 deletions.
5 changes: 3 additions & 2 deletions src/parser/grammar.lalrpop
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ use lalrpop_util::ErrorRecovery;
use super::{
ExtendedTerm,
utils::*,
lexer::{Token, NormalToken, StringToken, MultiStringToken},
lexer::{Token, NormalToken, StringToken, MultiStringToken, SymbolicStringStart},
error::ParseError,
uniterm::*,
};
Expand Down Expand Up @@ -913,7 +913,8 @@ extern {
"\"" => Token::Normal(NormalToken::DoubleQuote),
"\"%" => Token::MultiStr(MultiStringToken::End),
"m%\"" => Token::Normal(NormalToken::MultiStringStart(<usize>)),
"symbolic string start" => Token::Normal(NormalToken::SymbolicStringStart((<&'input str>, <usize>))),
"symbolic string start" => Token::Normal(NormalToken::SymbolicStringStart(
SymbolicStringStart{prefix: <&'input str>, percent_count: <usize>})),

"Num" => Token::Normal(NormalToken::Num),
"Dyn" => Token::Normal(NormalToken::Dyn),
Expand Down
20 changes: 16 additions & 4 deletions src/parser/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,15 @@ use std::ops::Range;

fn symbolic_string_prefix_and_length<'input>(
lex: &mut logos::Lexer<'input, NormalToken<'input>>,
) -> (&'input str, usize) {
) -> SymbolicStringStart<'input> {
let slice = lex.slice();
let (prefix, postfix) = slice
.rsplit_once('-')
.expect("The logos regexp ensures this succeeds");
(prefix, postfix.len())
SymbolicStringStart {
prefix,
percent_count: postfix.len(),
}
}

// **IMPORTANT**
Expand Down Expand Up @@ -170,7 +173,7 @@ pub enum NormalToken<'input> {
#[regex("m(%+)\"", |lex| lex.slice().len())]
MultiStringStart(usize),
#[regex("[a-zA-Z][_a-zA-Z0-9-']*-s(%+)\"", symbolic_string_prefix_and_length)]
SymbolicStringStart((&'input str, usize)),
SymbolicStringStart(SymbolicStringStart<'input>),

#[token("%tag%")]
Tag,
Expand Down Expand Up @@ -336,6 +339,12 @@ pub const KEYWORDS: &[&str] = &[
"priority", "force",
];

/// Payload of the `NormalToken::SymbolicStringStart` token: the symbolic
/// prefix and the length of the opening delimiter of a symbolic string
/// literal such as `nix-s%%"`.
///
/// Both fields are cheap (`&str` + `usize`), so the type derives `Copy`;
/// `Eq` is derivable since neither field involves floats.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct SymbolicStringStart<'input> {
    /// The identifier before the final `-` in the opening token (e.g. `nix`).
    pub prefix: &'input str,
    /// Number of bytes after the final `-` (the `s`, the `%` run, and the
    /// opening `"`), used to match the closing delimiter's `%` count.
    pub percent_count: usize,
}

/// The tokens in string mode.
#[derive(Logos, Debug, PartialEq, Eq, Clone)]
pub enum StringToken<'input> {
Expand Down Expand Up @@ -620,7 +629,10 @@ impl<'input> Iterator for Lexer<'input> {
}
Some(Normal(
NormalToken::MultiStringStart(delim_size)
| NormalToken::SymbolicStringStart((_, delim_size)),
| NormalToken::SymbolicStringStart(SymbolicStringStart {
percent_count: delim_size,
..
}),
)) => {
// for interpolation & closing delimiters we only care about
// the number of `%`s (plus the opening `"` or `{`) so we
Expand Down

0 comments on commit 8b6bcae

Please sign in to comment.