Skip to content

Commit

Permalink
Change desugaring of symbolic string to align with #1039
Browse files Browse the repository at this point in the history
  • Loading branch information
vkleen committed Feb 1, 2023
1 parent 8b6bcae commit 323a5c9
Show file tree
Hide file tree
Showing 4 changed files with 25 additions and 12 deletions.
5 changes: 3 additions & 2 deletions src/parser/grammar.lalrpop
Original file line number Diff line number Diff line change
Expand Up @@ -518,8 +518,9 @@ StrChunks: RichTerm = {
}).collect();

RichTerm::from(build_record([
(FieldPathElem::Ident("tag".into()), RichTerm::from(Term::Enum("SymbolicString".into()))),
(FieldPathElem::Ident("prefix".into()), RichTerm::from(Term::Str(prefix.to_owned()))),
(FieldPathElem::Ident("chunks".into()), RichTerm::from(Term::Array(terms, Default::default())))
(FieldPathElem::Ident("fragments".into()), RichTerm::from(Term::Array(terms, Default::default())))
], Default::default()))
} else {
let mut chunks = chunks;
Expand Down Expand Up @@ -914,7 +915,7 @@ extern {
"\"%" => Token::MultiStr(MultiStringToken::End),
"m%\"" => Token::Normal(NormalToken::MultiStringStart(<usize>)),
"symbolic string start" => Token::Normal(NormalToken::SymbolicStringStart(
SymbolicStringStart{prefix: <&'input str>, percent_count: <usize>})),
SymbolicStringStart{prefix: <&'input str>, length: <usize>})),

"Num" => Token::Normal(NormalToken::Num),
"Dyn" => Token::Normal(NormalToken::Dyn),
Expand Down
7 changes: 3 additions & 4 deletions src/parser/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ fn symbolic_string_prefix_and_length<'input>(
.expect("The logos regexp ensures this succeeds");
SymbolicStringStart {
prefix,
percent_count: postfix.len(),
length: postfix.len(),
}
}

Expand Down Expand Up @@ -342,7 +342,7 @@ pub const KEYWORDS: &[&str] = &[
#[derive(Debug, Clone, PartialEq)]
pub struct SymbolicStringStart<'input> {
pub prefix: &'input str,
pub percent_count: usize,
pub length: usize,
}

/// The tokens in string mode.
Expand Down Expand Up @@ -630,8 +630,7 @@ impl<'input> Iterator for Lexer<'input> {
Some(Normal(
NormalToken::MultiStringStart(delim_size)
| NormalToken::SymbolicStringStart(SymbolicStringStart {
percent_count: delim_size,
..
length: delim_size, ..
}),
)) => {
// for interpolation & closing delimiters we only care about
Expand Down
23 changes: 18 additions & 5 deletions src/parser/tests.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::rc::Rc;

use super::lexer::{Lexer, MultiStringToken, NormalToken, StringToken, Token};
use super::lexer::{Lexer, MultiStringToken, NormalToken, StringToken, SymbolicStringStart, Token};
use super::utils::{build_record, FieldPathElem};
use crate::error::ParseError;
use crate::identifier::Ident;
Expand Down Expand Up @@ -41,12 +41,16 @@ fn mk_single_chunk(s: &str) -> RichTerm {
fn mk_symbolic_single_chunk(prefix: &str, s: &str) -> RichTerm {
build_record(
[
(
FieldPathElem::Ident("tag".into()),
RichTerm::from(Term::Enum("SymbolicString".into())),
),
(
FieldPathElem::Ident("prefix".into()),
RichTerm::from(Term::Str(prefix.to_owned())),
),
(
FieldPathElem::Ident("chunks".into()),
FieldPathElem::Ident("fragments".into()),
RichTerm::from(Array(
Array::new(Rc::new([Str(String::from(s)).into()])),
Default::default(),
Expand Down Expand Up @@ -388,15 +392,21 @@ fn string_lexing() {
"empty symbolic string lexes like multi-line str",
r#"foo-s%""%"#,
vec![
Token::Normal(NormalToken::SymbolicStringStart(("foo", 3))),
Token::Normal(NormalToken::SymbolicStringStart(SymbolicStringStart {
prefix: "foo",
length: 3,
})),
Token::MultiStr(MultiStringToken::End),
],
),
(
"symbolic string with interpolation",
r#"foo-s%"text %{ 1 } etc."%"#,
vec![
Token::Normal(NormalToken::SymbolicStringStart(("foo", 3))),
Token::Normal(NormalToken::SymbolicStringStart(SymbolicStringStart {
prefix: "foo",
length: 3,
})),
Token::MultiStr(MultiStringToken::Literal("text ")),
Token::MultiStr(MultiStringToken::Interpolation),
Token::Normal(NormalToken::NumLiteral(1.0)),
Expand All @@ -409,7 +419,10 @@ fn string_lexing() {
"empty symbolic string with tag",
r#"tf-s%""%"#,
vec![
Token::Normal(NormalToken::SymbolicStringStart(("tf", 3))),
Token::Normal(NormalToken::SymbolicStringStart(SymbolicStringStart {
prefix: "tf",
length: 3,
})),
Token::MultiStr(MultiStringToken::End),
],
),
Expand Down
2 changes: 1 addition & 1 deletion tests/integration/pass/symbolic-strings.ncl
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
let {check, ..} = import "lib/assert.ncl" in
let sym = fun prefix_ chunks_ => { prefix = prefix_, chunks = chunks_ } in
let sym = fun prefix_ fragments_ => { tag = `SymbolicString, prefix = prefix_, fragments = fragments_ } in
[
# Static symbolic string
foo-s%"hello, world"% == sym "foo" ["hello, world"] ,
Expand Down

0 comments on commit 323a5c9

Please sign in to comment.