Commit d7fb7ab

Merge pull request #17592 from github/aibaars/cargo-fmt
Rust: run cargo fmt
2 parents 7b39608 + 6777a34 commit d7fb7ab
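
This merge only applies formatting: the two files below were rewritten by rustfmt (invoked through cargo fmt), so no behaviour changes. As a minimal, self-contained sketch of the kind of rewrite involved, assuming rustfmt's default 100-column width and using hypothetical data rather than code from this repository:

fn main() {
    let words = vec!["alpha", "beta", "gamma", ""];

    // A chain that fits within the width limit stays on a single line.
    let upper: Vec<String> = words.iter().map(|w| w.to_uppercase()).collect();

    // A chain that would overflow the limit is split into one method call
    // per line, exactly like the chains rewritten in the diff below.
    let lengths: Vec<usize> = words
        .iter()
        .map(|w| w.trim())
        .filter(|w| !w.is_empty())
        .map(|w| w.len())
        .collect();

    println!("{upper:?} {lengths:?}");
}

Running cargo fmt from the crate root reproduces this layout for every file in the crate.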

File tree

2 files changed: +125 −37 lines changed


rust/generate-schema/src/codegen/grammar.rs

Lines changed: 99 additions & 29 deletions
@@ -180,8 +180,12 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
         .enums
         .iter()
         .map(|en| {
-            let variants: Vec<_> =
-                en.variants.iter().map(|var| format_ident!("{}", var)).sorted().collect();
+            let variants: Vec<_> = en
+                .variants
+                .iter()
+                .map(|var| format_ident!("{}", var))
+                .sorted()
+                .collect();
             let name = format_ident!("{}", en.name);
             let kinds: Vec<_> = variants
                 .iter()
@@ -311,8 +315,10 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
     let enum_names = grammar.enums.iter().map(|it| &it.name);
     let node_names = grammar.nodes.iter().map(|it| &it.name);

-    let display_impls =
-        enum_names.chain(node_names.clone()).map(|it| format_ident!("{}", it)).map(|name| {
+    let display_impls = enum_names
+        .chain(node_names.clone())
+        .map(|it| format_ident!("{}", it))
+        .map(|name| {
             quote! {
                 impl std::fmt::Display for #name {
                     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -356,8 +362,11 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {

     let mut res = String::with_capacity(ast.len() * 2);

-    let mut docs =
-        grammar.nodes.iter().map(|it| &it.doc).chain(grammar.enums.iter().map(|it| &it.doc));
+    let mut docs = grammar
+        .nodes
+        .iter()
+        .map(|it| &it.doc)
+        .chain(grammar.enums.iter().map(|it| &it.doc));

     for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
         res.push_str(chunk);
@@ -393,16 +402,21 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String {
             quote! { #(#cs)* }
         }
     });
-    let punctuation =
-        grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();
+    let punctuation = grammar
+        .punct
+        .iter()
+        .map(|(_token, name)| format_ident!("{}", name))
+        .collect::<Vec<_>>();

     let fmt_kw_as_variant = |&name| match name {
         "Self" => format_ident!("SELF_TYPE_KW"),
         name => format_ident!("{}_KW", to_upper_snake_case(name)),
     };
     let strict_keywords = grammar.keywords;
-    let strict_keywords_variants =
-        strict_keywords.iter().map(fmt_kw_as_variant).collect::<Vec<_>>();
+    let strict_keywords_variants = strict_keywords
+        .iter()
+        .map(fmt_kw_as_variant)
+        .collect::<Vec<_>>();
     let strict_keywords_tokens = strict_keywords.iter().map(|it| format_ident!("{it}"));

     let edition_dependent_keywords_variants_match_arm = grammar
@@ -425,15 +439,23 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String {
         .iter()
         .map(|(kw, _)| fmt_kw_as_variant(kw))
         .collect::<Vec<_>>();
-    let edition_dependent_keywords_tokens =
-        grammar.edition_dependent_keywords.iter().map(|(it, _)| format_ident!("{it}"));
+    let edition_dependent_keywords_tokens = grammar
+        .edition_dependent_keywords
+        .iter()
+        .map(|(it, _)| format_ident!("{it}"));

     let contextual_keywords = grammar.contextual_keywords;
-    let contextual_keywords_variants =
-        contextual_keywords.iter().map(fmt_kw_as_variant).collect::<Vec<_>>();
+    let contextual_keywords_variants = contextual_keywords
+        .iter()
+        .map(fmt_kw_as_variant)
+        .collect::<Vec<_>>();
     let contextual_keywords_tokens = contextual_keywords.iter().map(|it| format_ident!("{it}"));
     let contextual_keywords_str_match_arm = grammar.contextual_keywords.iter().map(|kw| {
-        match grammar.edition_dependent_keywords.iter().find(|(ed_kw, _)| ed_kw == kw) {
+        match grammar
+            .edition_dependent_keywords
+            .iter()
+            .find(|(ed_kw, _)| ed_kw == kw)
+        {
             Some((_, ed)) => quote! { #kw if edition < #ed },
             None => quote! { #kw },
         }
@@ -443,7 +465,11 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String {
         .iter()
         .map(|kw_s| {
            let kw = fmt_kw_as_variant(kw_s);
-            match grammar.edition_dependent_keywords.iter().find(|(ed_kw, _)| ed_kw == kw_s) {
+            match grammar
+                .edition_dependent_keywords
+                .iter()
+                .find(|(ed_kw, _)| ed_kw == kw_s)
+            {
                Some((_, ed)) => quote! { #kw if edition < #ed },
                None => quote! { #kw },
            }
@@ -457,12 +483,23 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String {
         .dedup()
         .collect::<Vec<_>>();

-    let literals =
-        grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+    let literals = grammar
+        .literals
+        .iter()
+        .map(|name| format_ident!("{}", name))
+        .collect::<Vec<_>>();

-    let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+    let tokens = grammar
+        .tokens
+        .iter()
+        .map(|name| format_ident!("{}", name))
+        .collect::<Vec<_>>();

-    let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+    let nodes = grammar
+        .nodes
+        .iter()
+        .map(|name| format_ident!("{}", name))
+        .collect::<Vec<_>>();

     let ast = quote! {
         #![allow(bad_style, missing_docs, unreachable_pub)]
@@ -569,7 +606,10 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String {
         }
     };

-    add_preamble(crate::flags::CodegenType::Grammar, reformat(ast.to_string()))
+    add_preamble(
+        crate::flags::CodegenType::Grammar,
+        reformat(ast.to_string()),
+    )
 }

 fn to_upper_snake_case(s: &str) -> String {
@@ -719,13 +759,23 @@ pub(crate) fn lower(grammar: &Grammar) -> AstSrc {
         let rule = &grammar[node].rule;
         match lower_enum(grammar, rule) {
             Some(variants) => {
-                let enum_src = AstEnumSrc { doc: Vec::new(), name, traits: Vec::new(), variants };
+                let enum_src = AstEnumSrc {
+                    doc: Vec::new(),
+                    name,
+                    traits: Vec::new(),
+                    variants,
+                };
                 res.enums.push(enum_src);
             }
             None => {
                 let mut fields = Vec::new();
                 lower_rule(&mut fields, grammar, None, rule);
-                res.nodes.push(AstNodeSrc { doc: Vec::new(), name, traits: Vec::new(), fields });
+                res.nodes.push(AstNodeSrc {
+                    doc: Vec::new(),
+                    name,
+                    traits: Vec::new(),
+                    fields,
+                });
             }
         }
     }
@@ -776,7 +826,11 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r
         Rule::Node(node) => {
             let ty = grammar[*node].name.clone();
             let name = label.cloned().unwrap_or_else(|| to_lower_snake_case(&ty));
-            let field = Field::Node { name, ty, cardinality: Cardinality::Optional };
+            let field = Field::Node {
+                name,
+                ty,
+                cardinality: Cardinality::Optional,
+            };
             acc.push(field);
         }
         Rule::Token(token) => {
@@ -791,8 +845,14 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r
         Rule::Rep(inner) => {
             if let Rule::Node(node) = &**inner {
                 let ty = grammar[*node].name.clone();
-                let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
-                let field = Field::Node { name, ty, cardinality: Cardinality::Many };
+                let name = label
+                    .cloned()
+                    .unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
+                let field = Field::Node {
+                    name,
+                    ty,
+                    cardinality: Cardinality::Many,
+                };
                 acc.push(field);
                 return;
             }
@@ -863,8 +923,14 @@ fn lower_separated_list(
         return false;
     }
     let ty = grammar[*node].name.clone();
-    let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
-    let field = Field::Node { name, ty, cardinality: Cardinality::Many };
+    let name = label
+        .cloned()
+        .unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
+    let field = Field::Node {
+        name,
+        ty,
+        cardinality: Cardinality::Many,
+    };
     acc.push(field);
     true
 }
@@ -900,7 +966,11 @@ fn extract_enums(ast: &mut AstSrc) {
                 node.remove_field(to_remove);
                 let ty = enm.name.clone();
                 let name = to_lower_snake_case(&ty);
-                node.fields.push(Field::Node { name, ty, cardinality: Cardinality::Optional });
+                node.fields.push(Field::Node {
+                    name,
+                    ty,
+                    cardinality: Cardinality::Optional,
+                });
             }
         }
     }

rust/generate-schema/src/codegen/grammar/ast_src.rs

Lines changed: 26 additions & 8 deletions
@@ -110,8 +110,15 @@ const RESERVED: &[&str] = &[
 ];
 // keywords that are keywords only in specific parse contexts
 #[doc(alias = "WEAK_KEYWORDS")]
-const CONTEXTUAL_KEYWORDS: &[&str] =
-    &["macro_rules", "union", "default", "raw", "dyn", "auto", "yeet"];
+const CONTEXTUAL_KEYWORDS: &[&str] = &[
+    "macro_rules",
+    "union",
+    "default",
+    "raw",
+    "dyn",
+    "auto",
+    "yeet",
+];
 // keywords we use for special macro expansions
 const CONTEXTUAL_BUILTIN_KEYWORDS: &[&str] = &["builtin", "offset_of", "format_args", "asm"];
 // keywords that are keywords depending on the edition
@@ -128,8 +135,11 @@ pub(crate) fn generate_kind_src(
     enums: &[AstEnumSrc],
     grammar: &ungrammar::Grammar,
 ) -> KindsSrc {
-    let mut contextual_keywords: Vec<&_> =
-        CONTEXTUAL_KEYWORDS.iter().chain(CONTEXTUAL_BUILTIN_KEYWORDS).copied().collect();
+    let mut contextual_keywords: Vec<&_> = CONTEXTUAL_KEYWORDS
+        .iter()
+        .chain(CONTEXTUAL_BUILTIN_KEYWORDS)
+        .copied()
+        .collect();

     let mut keywords: Vec<&_> = Vec::new();
     let mut tokens: Vec<&_> = TOKENS.to_vec();
@@ -162,9 +172,13 @@ pub(crate) fn generate_kind_src(
             }
         }
     });
-    PUNCT.iter().zip(used_puncts).filter(|(_, used)| !used).for_each(|((punct, _), _)| {
-        panic!("Punctuation {punct:?} is not used in grammar");
-    });
+    PUNCT
+        .iter()
+        .zip(used_puncts)
+        .filter(|(_, used)| !used)
+        .for_each(|((punct, _), _)| {
+            panic!("Punctuation {punct:?} is not used in grammar");
+        });
     keywords.extend(RESERVED.iter().copied());
     keywords.sort();
     keywords.dedup();
@@ -226,7 +240,11 @@ pub(crate) struct AstNodeSrc {
 #[derive(Debug, Eq, PartialEq)]
 pub(crate) enum Field {
     Token(String),
-    Node { name: String, ty: String, cardinality: Cardinality },
+    Node {
+        name: String,
+        ty: String,
+        cardinality: Cardinality,
+    },
 }

 #[derive(Debug, Eq, PartialEq)]
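
The struct-literal changes above are purely layout: a one-line literal and the field-per-line form rustfmt produces denote the same value. A simplified, self-contained sketch of that point, using trimmed-down stand-ins rather than the full ast_src.rs definitions:

#[derive(Debug, Eq, PartialEq)]
enum Cardinality {
    Optional,
    Many,
}

#[derive(Debug, Eq, PartialEq)]
enum Field {
    Token(String),
    Node {
        name: String,
        ty: String,
        cardinality: Cardinality,
    },
}

fn main() {
    // The same value, written on one line and in the field-per-line layout
    // that rustfmt chose in the diff above.
    let one_line = Field::Node { name: "fields".into(), ty: "Field".into(), cardinality: Cardinality::Many };
    let formatted = Field::Node {
        name: "fields".into(),
        ty: "Field".into(),
        cardinality: Cardinality::Many,
    };
    assert_eq!(one_line, formatted);

    // The other shapes of the enum, just to exercise them.
    let optional = Field::Node {
        name: "name".into(),
        ty: "Name".into(),
        cardinality: Cardinality::Optional,
    };
    let token = Field::Token("comma".into());
    println!("{formatted:?} {optional:?} {token:?}");
}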
